Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-06-20 13:43:29 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-06-20 13:43:29 +0300
commit    3b1af5cc7ed2666ff18b718ce5d30fa5a2756674 (patch)
tree      3bc4a40e0ee51ec27eabf917c537033c0c5b14d4 /spec/lib/gitlab
parent    9bba14be3f2c211bf79e15769cd9b77bc73a13bc (diff)
Add latest changes from gitlab-org/gitlab@16-1-stable-eev16.1.0-rc42
Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r--spec/lib/gitlab/access/branch_protection_spec.rb52
-rw-r--r--spec/lib/gitlab/alert_management/payload/prometheus_spec.rb14
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb5
-rw-r--r--spec/lib/gitlab/api_authentication/token_locator_spec.rb23
-rw-r--r--spec/lib/gitlab/asciidoc/include_processor_spec.rb150
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb106
-rw-r--r--spec/lib/gitlab/audit/auditor_spec.rb33
-rw-r--r--spec/lib/gitlab/audit/type/definition_spec.rb24
-rw-r--r--spec/lib/gitlab/auth/ldap/auth_hash_spec.rb12
-rw-r--r--spec/lib/gitlab/auth/saml/config_spec.rb26
-rw-r--r--spec/lib/gitlab/auth_spec.rb20
-rw-r--r--spec/lib/gitlab/avatar_cache_spec.rb62
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb245
-rw-r--r--spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb38
-rw-r--r--spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb68
-rw-r--r--spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb75
-rw-r--r--spec/lib/gitlab/background_migration/backfill_group_features_spec.rb39
-rw-r--r--spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb67
-rw-r--r--spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb57
-rw-r--r--spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb50
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb61
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb41
-rw-r--r--spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb197
-rw-r--r--spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb302
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb10
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb53
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb54
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb6
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb63
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb64
-rw-r--r--spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb65
-rw-r--r--spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb232
-rw-r--r--spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb78
-rw-r--r--spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb148
-rw-r--r--spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb82
-rw-r--r--spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb28
-rw-r--r--spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb52
-rw-r--r--spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb44
-rw-r--r--spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb71
-rw-r--r--spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb50
-rw-r--r--spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb93
-rw-r--r--spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb266
-rw-r--r--spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb54
-rw-r--r--spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb57
-rw-r--r--spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb174
-rw-r--r--spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb66
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb52
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb52
-rw-r--r--spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb40
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb68
-rw-r--r--spec/lib/gitlab/cache/json_cache_spec.rb72
-rw-r--r--spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb113
-rw-r--r--spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb76
-rw-r--r--spec/lib/gitlab/checks/branch_check_spec.rb19
-rw-r--r--spec/lib/gitlab/checks/diff_check_spec.rb83
-rw-r--r--spec/lib/gitlab/checks/force_push_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/artifact_file_reader_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/context/build_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/build/context/global_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/build/rules_spec.rb93
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/id_token_spec.rb33
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/config/external/file/artifact_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb51
-rw-r--r--spec/lib/gitlab/ci/config/external/rules_spec.rb132
-rw-r--r--spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb (renamed from spec/lib/gitlab/ci/config/external/interpolator_spec.rb)2
-rw-r--r--spec/lib/gitlab/ci/config/yaml/loader_spec.rb153
-rw-r--r--spec/lib/gitlab/ci/config/yaml/result_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/yaml_spec.rb78
-rw-r--r--spec/lib/gitlab/ci/jwt_v2_spec.rb74
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/project_config_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb44
-rw-r--r--spec/lib/gitlab/ci/status/build/factory_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/status/scheduled_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/status/success_warning_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb81
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb145
-rw-r--r--spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb30
-rw-r--r--spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb25
-rw-r--r--spec/lib/gitlab/container_repository/tags/cache_spec.rb4
-rw-r--r--spec/lib/gitlab/counters/buffered_counter_spec.rb75
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb14
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_creator_spec.rb11
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb11
-rw-r--r--spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb40
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb8
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb52
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status_spec.rb114
-rw-r--r--spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb34
-rw-r--r--spec/lib/gitlab/database/database_connection_info_spec.rb161
-rw-r--r--spec/lib/gitlab/database/each_database_spec.rb6
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_info_spec.rb26
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb192
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb (renamed from spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb)17
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb (renamed from spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb)26
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/write_ahead_log_spec.rb (renamed from spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb)19
-rw-r--r--spec/lib/gitlab/database/health_status/logger_spec.rb13
-rw-r--r--spec/lib/gitlab/database/health_status/signals_spec.rb40
-rw-r--r--spec/lib/gitlab/database/health_status_spec.rb172
-rw-r--r--spec/lib/gitlab/database/load_balancing/host_spec.rb123
-rw-r--r--spec/lib/gitlab/database/lock_writes_manager_spec.rb44
-rw-r--r--spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb50
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb15
-rw-r--r--spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb39
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb4
-rw-r--r--spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb34
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb36
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb30
-rw-r--r--spec/lib/gitlab/database/pg_depend_spec.rb10
-rw-r--r--spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb10
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb12
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb49
-rw-r--r--spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb29
-rw-r--r--spec/lib/gitlab/database/reindexing/index_selection_spec.rb4
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb2
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb2
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb28
-rw-r--r--spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb42
-rw-r--r--spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb24
-rw-r--r--spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb25
-rw-r--r--spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb121
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb5
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb8
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb7
-rw-r--r--spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb7
-rw-r--r--spec/lib/gitlab/database/tables_locker_spec.rb25
-rw-r--r--spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb122
-rw-r--r--spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb16
-rw-r--r--spec/lib/gitlab/database_importers/default_organization_importer_spec.rb32
-rw-r--r--spec/lib/gitlab/database_spec.rb99
-rw-r--r--spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb34
-rw-r--r--spec/lib/gitlab/diff/formatters/file_formatter_spec.rb44
-rw-r--r--spec/lib/gitlab/diff/formatters/text_formatter_spec.rb3
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb5
-rw-r--r--spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb238
-rw-r--r--spec/lib/gitlab/diff/position_tracer_spec.rb21
-rw-r--r--spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb88
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb18
-rw-r--r--spec/lib/gitlab/email/reply_parser_spec.rb34
-rw-r--r--spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb44
-rw-r--r--spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization/cache_spec.rb2
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb8
-rw-r--r--spec/lib/gitlab/git/conflict/parser_spec.rb2
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb41
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb16
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb416
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb16
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb111
-rw-r--r--spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb107
-rw-r--r--spec/lib/gitlab/github_import/attachments_downloader_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb3
-rw-r--r--spec/lib/gitlab/github_import/importer/repository_importer_spec.rb5
-rw-r--r--spec/lib/gitlab/github_import/representation/diff_note_spec.rb14
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb2
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb23
-rw-r--r--spec/lib/gitlab/graphql/generic_tracing_spec.rb50
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb15
-rw-r--r--spec/lib/gitlab/hotlinking_detector_spec.rb3
-rw-r--r--spec/lib/gitlab/http_spec.rb73
-rw-r--r--spec/lib/gitlab/import/errors_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml9
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb146
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb45
-rw-r--r--spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb24
-rw-r--r--spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml6
-rw-r--r--spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb6
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb11
-rw-r--r--spec/lib/gitlab/instrumentation/redis_spec.rb20
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb18
-rw-r--r--spec/lib/gitlab/internal_events_spec.rb65
-rw-r--r--spec/lib/gitlab/jira_import_spec.rb2
-rw-r--r--spec/lib/gitlab/json_cache_spec.rb551
-rw-r--r--spec/lib/gitlab/lets_encrypt/challenge_spec.rb2
-rw-r--r--spec/lib/gitlab/lets_encrypt/client_spec.rb4
-rw-r--r--spec/lib/gitlab/lets_encrypt/order_spec.rb2
-rw-r--r--spec/lib/gitlab/lets_encrypt_spec.rb2
-rw-r--r--spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb8
-rw-r--r--spec/lib/gitlab/markdown_cache/redis/extension_spec.rb12
-rw-r--r--spec/lib/gitlab/merge_requests/message_generator_spec.rb19
-rw-r--r--spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/rails_slis_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb31
-rw-r--r--spec/lib/gitlab/middleware/compressed_json_spec.rb24
-rw-r--r--spec/lib/gitlab/omniauth_initializer_spec.rb8
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb9
-rw-r--r--spec/lib/gitlab/patch/redis_cache_store_spec.rb141
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb17
-rw-r--r--spec/lib/gitlab/path_traversal_spec.rb185
-rw-r--r--spec/lib/gitlab/project_authorizations_spec.rb50
-rw-r--r--spec/lib/gitlab/reactive_cache_set_cache_spec.rb36
-rw-r--r--spec/lib/gitlab/redis/chat_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/cluster_cache_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/cluster_util_spec.rb60
-rw-r--r--spec/lib/gitlab/redis/cross_slot_spec.rb124
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb24
-rw-r--r--spec/lib/gitlab/redis/rate_limiting_spec.rb6
-rw-r--r--spec/lib/gitlab/repository_cache/preloader_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_hash_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb87
-rw-r--r--spec/lib/gitlab/repository_size_error_message_spec.rb6
-rw-r--r--spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb12
-rw-r--r--spec/lib/gitlab/search/abuse_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/search/params_spec.rb2
-rw-r--r--spec/lib/gitlab/search_context/builder_spec.rb2
-rw-r--r--spec/lib/gitlab/search_results_spec.rb46
-rw-r--r--spec/lib/gitlab/sentence_spec.rb37
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb16
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb111
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb16
-rw-r--r--spec/lib/gitlab/silent_mode_spec.rb97
-rw-r--r--spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb63
-rw-r--r--spec/lib/gitlab/spamcheck/client_spec.rb4
-rw-r--r--spec/lib/gitlab/task_helpers_spec.rb119
-rw-r--r--spec/lib/gitlab/template/metrics_dashboard_template_spec.rb26
-rw-r--r--spec/lib/gitlab/tracking_spec.rb16
-rw-r--r--spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb18
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb34
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb24
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb25
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb20
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb6
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb158
-rw-r--r--spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb19
-rw-r--r--spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb43
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb37
-rw-r--r--spec/lib/gitlab/utils/markdown_spec.rb2
-rw-r--r--spec/lib/gitlab/utils/sanitize_node_link_spec.rb69
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb2
-rw-r--r--spec/lib/gitlab/utils_spec.rb178
-rw-r--r--spec/lib/gitlab/verify/ci_secure_files_spec.rb64
252 files changed, 6815 insertions, 5271 deletions
diff --git a/spec/lib/gitlab/access/branch_protection_spec.rb b/spec/lib/gitlab/access/branch_protection_spec.rb
index 44c30d1f596..5ab610dfc8f 100644
--- a/spec/lib/gitlab/access/branch_protection_spec.rb
+++ b/spec/lib/gitlab/access/branch_protection_spec.rb
@@ -7,10 +7,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#any?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
- Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
end
with_them do
@@ -20,10 +21,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#developer_can_push?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
- Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them do
@@ -35,10 +37,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#developer_can_merge?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
- Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them do
@@ -50,10 +53,11 @@ RSpec.describe Gitlab::Access::BranchProtection do
describe '#fully_protected?' do
where(:level, :result) do
- Gitlab::Access::PROTECTION_NONE | false
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
- Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | true
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them do
@@ -62,4 +66,20 @@ RSpec.describe Gitlab::Access::BranchProtection do
end
end
end
+
+ describe '#developer_can_initial_push?' do
+ where(:level, :result) do
+ Gitlab::Access::PROTECTION_NONE | false
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
+ Gitlab::Access::PROTECTION_FULL | false
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
+ end
+
+ with_them do
+ it do
+ expect(described_class.new(level).developer_can_initial_push?).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
index 6a4f35c01e3..8ead292c27a 100644
--- a/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
@@ -297,4 +297,18 @@ RSpec.describe Gitlab::AlertManagement::Payload::Prometheus do
it { is_expected.to be_nil }
end
end
+
+ describe '#source' do
+ subject { parsed_payload.source }
+
+ it { is_expected.to eq('Prometheus') }
+
+ context 'with alerting integration provided' do
+ before do
+ parsed_payload.integration = instance_double('::AlertManagement::HttpIntegration', name: 'INTEGRATION')
+ end
+
+ it { is_expected.to eq('INTEGRATION') }
+ end
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
index 122a94a39c2..261d587506f 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Analytics::CycleAnalytics::Average do
+RSpec.describe Gitlab::Analytics::CycleAnalytics::Average, feature_category: :value_stream_management do
let_it_be(:project) { create(:project) }
let_it_be(:issue_1) do
@@ -45,7 +45,8 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Average do
it { is_expected.to eq(nil) }
end
- context 'returns the average duration in seconds' do
+ context 'returns the average duration in seconds',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/413223' do
it { is_expected.to be_within(0.5).of(7.5.days.to_f) }
end
end
diff --git a/spec/lib/gitlab/api_authentication/token_locator_spec.rb b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
index 4b19a3d5846..9b33d443960 100644
--- a/spec/lib/gitlab/api_authentication/token_locator_spec.rb
+++ b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::APIAuthentication::TokenLocator do
+RSpec.describe Gitlab::APIAuthentication::TokenLocator, feature_category: :system_access do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
@@ -157,6 +157,27 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
end
end
+ context 'with :http_header' do
+ let(:type) { { http_header: 'Api-Key' } }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be(nil)
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Api-Key' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
context 'with :token_param' do
let(:type) { :token_param }
diff --git a/spec/lib/gitlab/asciidoc/include_processor_spec.rb b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
index 5c225575965..0c86c191abc 100644
--- a/spec/lib/gitlab/asciidoc/include_processor_spec.rb
+++ b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
@@ -18,32 +18,174 @@ RSpec.describe Gitlab::Asciidoc::IncludeProcessor do
let(:max_includes) { 10 }
let(:reader) { Asciidoctor::PreprocessorReader.new(document, lines, 'file.adoc') }
+
let(:document) { Asciidoctor::Document.new(lines) }
subject(:processor) { described_class.new(processor_context) }
let(:a_blob) { double(:Blob, readable_text?: true, data: a_data) }
- let(:a_data) { StringIO.new('include::b.adoc[]') }
+ let(:a_data) { 'include::b.adoc[]' }
- let(:lines) { [':max-include-depth: 1000'] + Array.new(10, 'include::a.adoc[]') }
+ let(:directives) { [':max-include-depth: 1000'] }
+ let(:lines) { directives + Array.new(10, 'include::a.adoc[]') }
before do
+ allow(project.repository).to receive(:blob_at).with(ref, anything).and_return(nil)
allow(project.repository).to receive(:blob_at).with(ref, 'a.adoc').and_return(a_blob)
end
+ describe 'read_lines' do
+ let(:result) { processor.send(:read_lines, filename, selector) }
+ let(:selector) { nil }
+
+ context 'when reading a file in the repository' do
+ let(:filename) { 'a.adoc' }
+
+ it 'returns the blob contents' do
+ expect(result).to match_array([a_data])
+ end
+
+ context 'when the blob does not exist' do
+ let(:filename) { 'this-file-does-not-exist' }
+
+ it 'raises NoData' do
+ expect { result }.to raise_error(described_class::NoData)
+ end
+ end
+
+ context 'when there is a selector' do
+ let(:a_data) { %w[a b c d].join("\n") }
+ let(:selector) { ->(_, lineno) { lineno.odd? } }
+
+ it 'selects the lines' do
+ expect(result).to eq %W[a\n c\n]
+ end
+ end
+
+ it 'allows at most N blob includes' do
+ max_includes.times do
+ processor.send(:read_lines, filename, selector)
+ end
+
+ expect(processor.send(:include_allowed?, 'anything', reader)).to be_falsey
+ end
+ end
+
+ context 'when reading content from a URL' do
+ let(:filename) { 'http://example.org/file' }
+
+ it 'fetches the data using a GET request' do
+ stub_request(:get, filename).to_return(status: 200, body: 'something')
+
+ expect(result).to match_array(['something'])
+ end
+
+ context 'when the URI returns 404' do
+ before do
+ stub_request(:get, filename).to_return(status: 404, body: 'not found')
+ end
+
+ it 'raises NoData' do
+ expect { result }.to raise_error(described_class::NoData)
+ end
+ end
+
+ it 'allows at most N HTTP includes' do
+ stub_request(:get, filename).to_return(status: 200, body: 'something')
+
+ max_includes.times do
+ processor.send(:read_lines, filename, selector)
+ end
+
+ expect(processor.send(:include_allowed?, 'anything', reader)).to be_falsey
+ end
+
+ context 'when there is a selector' do
+ let(:http_body) { %w[x y z].join("\n") }
+ let(:selector) { ->(_, lineno) { lineno.odd? } }
+
+ it 'selects the lines' do
+ stub_request(:get, filename).to_return(status: 200, body: http_body)
+
+ expect(result).to eq %W[x\n z]
+ end
+ end
+ end
+ end
+
describe '#include_allowed?' do
+ context 'when allow-uri-read is nil' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100, 'allow-uri-read' => nil })
+ end
+
+ it 'allows http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_falsey
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_falsey
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
+ context 'when allow-uri-read is false' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100, 'allow-uri-read' => false })
+ end
+
+ it 'allows http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_falsey
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_falsey
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
+ context 'when allow-uri-read is true' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100, 'allow-uri-read' => true })
+ end
+
+ it 'allows http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_truthy
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_truthy
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
+ context 'without allow-uri-read' do
+ before do
+ allow(document).to receive(:attributes).and_return({ 'max-include-depth' => 100 })
+ end
+
+ it 'forbids http includes' do
+ expect(processor.send(:include_allowed?, 'http://example.com', reader)).to be_falsey
+ expect(processor.send(:include_allowed?, 'https://example.com', reader)).to be_falsey
+ end
+
+ it 'allows blob includes' do
+ expect(processor.send(:include_allowed?, 'a.blob', reader)).to be_truthy
+ end
+ end
+
it 'allows the first include' do
expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_truthy
end
it 'allows the Nth include' do
- (max_includes - 1).times { processor.send(:read_blob, ref, 'a.adoc') }
+ (max_includes - 1).times { processor.send(:read_lines, 'a.adoc', nil) }
expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_truthy
end
it 'disallows the Nth + 1 include' do
- max_includes.times { processor.send(:read_blob, ref, 'a.adoc') }
+ max_includes.times { processor.send(:read_lines, 'a.adoc', nil) }
expect(processor.send(:include_allowed?, 'foo.adoc', reader)).to be_falsey
end
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 31e575e0466..a43f08db659 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -20,7 +20,7 @@ module Gitlab
expected_asciidoc_opts = {
safe: :secure,
backend: :gitlab_html5,
- attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }),
+ attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil, "allow-uri-read" => false }),
extensions: be_a(Proc)
}
@@ -35,7 +35,7 @@ module Gitlab
expected_asciidoc_opts = {
safe: :secure,
backend: :gitlab_html5,
- attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }),
+ attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil, "allow-uri-read" => false }),
extensions: be_a(Proc)
}
@@ -730,6 +730,19 @@ module Gitlab
include_examples 'invalid include'
end
+ context 'with a URI that returns 404' do
+ let(:include_path) { 'https://example.com/some_file.adoc' }
+
+ before do
+ stub_request(:get, include_path).to_return(status: 404, body: 'not found')
+ allow_any_instance_of(ApplicationSetting).to receive(:wiki_asciidoc_allow_uri_includes).and_return(true)
+ end
+
+ it 'renders Unresolved directive placeholder' do
+ is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>")
+ end
+ end
+
context 'with path to a textual file' do
let(:include_path) { 'sample.adoc' }
@@ -804,6 +817,59 @@ module Gitlab
end
end
+ describe 'the effect of max-includes' do
+ before do
+ create_file 'doc/preface.adoc', 'source: preface'
+ create_file 'doc/chapter-1.adoc', 'source: chapter-1'
+ create_file 'license.adoc', 'source: license'
+ stub_request(:get, 'https://example.com/some_file.adoc')
+ .to_return(status: 200, body: 'source: interwebs')
+ stub_request(:get, 'https://example.com/other_file.adoc')
+ .to_return(status: 200, body: 'source: intertubes')
+ allow_any_instance_of(ApplicationSetting).to receive(:wiki_asciidoc_allow_uri_includes).and_return(true)
+ end
+
+ let(:input) do
+ <<~ADOC
+ Source: requested file
+
+ include::doc/preface.adoc[]
+ include::https://example.com/some_file.adoc[]
+ include::doc/chapter-1.adoc[]
+ include::https://example.com/other_file.adoc[]
+ include::license.adoc[]
+ ADOC
+ end
+
+ it 'includes the content of all sources' do
+ expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip
+ Source: requested file
+ source: preface
+ source: interwebs
+ source: chapter-1
+ source: intertubes
+ source: license
+ ADOC
+ end
+
+ context 'when the document includes more than MAX_INCLUDES' do
+ before do
+ stub_const("#{described_class}::MAX_INCLUDES", 2)
+ end
+
+ it 'includes only the content of the first 2 sources' do
+ expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip
+ Source: requested file
+ source: preface
+ source: interwebs
+ doc/chapter-1.adoc
+ https://example.com/other_file.adoc
+ license.adoc
+ ADOC
+ end
+ end
+ end
+
context 'recursive includes with relative paths' do
let(:input) do
<<~ADOC
@@ -811,29 +877,53 @@ module Gitlab
include::doc/README.adoc[]
- include::license.adoc[]
+ include::https://example.com/some_file.adoc[]
+
+ include::license.adoc[lines=1]
ADOC
end
before do
+ stub_request(:get, 'https://example.com/some_file.adoc')
+ .to_return(status: 200, body: <<~ADOC)
+ Source: some file from Example.com
+
+ include::https://example.com/other_file[lines=1..2]
+
+ End some file from Example.com
+ ADOC
+
+ stub_request(:get, 'https://example.com/other_file')
+ .to_return(status: 200, body: <<~ADOC)
+ Source: other file from Example.com
+ Other file line 2
+ Other file line 3
+ ADOC
+
create_file 'doc/README.adoc', <<~ADOC
Source: doc/README.adoc
- include::../license.adoc[]
+ include::../license.adoc[lines=1;3]
include::api/hello.adoc[]
ADOC
create_file 'license.adoc', <<~ADOC
Source: license.adoc
+ License content
+ License end
ADOC
create_file 'doc/api/hello.adoc', <<~ADOC
Source: doc/api/hello.adoc
- include::./common.adoc[]
+ include::./common.adoc[lines=2..3]
ADOC
create_file 'doc/api/common.adoc', <<~ADOC
+ Common start
Source: doc/api/common.adoc
+ Common end
ADOC
+
+ allow_any_instance_of(ApplicationSetting).to receive(:wiki_asciidoc_allow_uri_includes).and_return(true)
end
it 'includes content of the included files recursively' do
@@ -841,8 +931,14 @@ module Gitlab
Source: requested file
Source: doc/README.adoc
Source: license.adoc
+ License end
Source: doc/api/hello.adoc
Source: doc/api/common.adoc
+ Common end
+ Source: some file from Example.com
+ Source: other file from Example.com
+ Other file line 2
+ End some file from Example.com
Source: license.adoc
ADOC
end
diff --git a/spec/lib/gitlab/audit/auditor_spec.rb b/spec/lib/gitlab/audit/auditor_spec.rb
index 2b3c8506440..386d4157e90 100644
--- a/spec/lib/gitlab/audit/auditor_spec.rb
+++ b/spec/lib/gitlab/audit/auditor_spec.rb
@@ -18,12 +18,45 @@ RSpec.describe Gitlab::Audit::Auditor, feature_category: :audit_events do
end
let(:logger) { instance_spy(Gitlab::AuditJsonLogger) }
+ let(:app_logger) { instance_spy(Gitlab::AppLogger) }
subject(:auditor) { described_class }
describe '.audit' do
let(:audit!) { auditor.audit(context) }
+ context 'when yaml definition is not defined' do
+ before do
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_return(false)
+ allow(Gitlab::AppLogger).to receive(:warn).and_return(app_logger)
+ end
+
+ it 'logs a warning when YAML is not defined' do
+ expected_warning = {
+ message: 'Logging audit events without an event type definition will be deprecated soon ' \
+ '(https://docs.gitlab.com/ee/development/audit_event_guide/#event-type-definitions)',
+ event_type: name
+ }
+
+ audit!
+
+ expect(Gitlab::AppLogger).to have_received(:warn).with(expected_warning)
+ end
+ end
+
+ context 'when yaml definition is defined' do
+ before do
+ allow(Gitlab::Audit::Type::Definition).to receive(:defined?).and_return(true)
+ allow(Gitlab::AppLogger).to receive(:warn).and_return(app_logger)
+ end
+
+ it 'does not log a warning when YAML is defined' do
+ audit!
+
+ expect(Gitlab::AppLogger).not_to have_received(:warn)
+ end
+ end
+
context 'when authentication event' do
it 'creates an authentication event' do
expect(AuthenticationEvent).to receive(:new).with(
diff --git a/spec/lib/gitlab/audit/type/definition_spec.rb b/spec/lib/gitlab/audit/type/definition_spec.rb
index d1d6b0d7a78..9c311677883 100644
--- a/spec/lib/gitlab/audit/type/definition_spec.rb
+++ b/spec/lib/gitlab/audit/type/definition_spec.rb
@@ -281,6 +281,30 @@ RSpec.describe Gitlab::Audit::Type::Definition do
end
end
+ describe '.names_with_category' do
+ let(:store1) { Dir.mktmpdir('path1') }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(store1, '**', '*.yml')
+ ]
+ )
+ end
+
+ subject { described_class.names_with_category }
+
+ after do
+ FileUtils.rm_rf(store1)
+ end
+
+ it "returns an array with just the event name and feature category" do
+ write_audit_event_type(store1, path, yaml_content)
+
+ expect(subject).to eq([{ event_name: :group_deploy_token_destroyed, feature_category: 'continuous_delivery' }])
+ end
+ end
+
def write_audit_event_type(store, path, content)
path = File.join(store, path)
dir = File.dirname(path)
diff --git a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
index e8008aeaf57..c19d890a703 100644
--- a/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/auth_hash_spec.rb
@@ -27,12 +27,12 @@ RSpec.describe Gitlab::Auth::Ldap::AuthHash do
end
let(:raw_info) do
- {
- uid: ['123456'],
- email: ['johnsmith@example.com'],
- cn: ['Smith, J.'],
- fullName: ['John Smith']
- }
+ Net::LDAP::Entry.new.tap do |entry|
+ entry['uid'] = ['123456']
+ entry['email'] = ['johnsmith@example.com']
+ entry['cn'] = ['Smith, J.']
+ entry['fullName'] = ['John Smith']
+ end
end
context "without overridden attributes" do
diff --git a/spec/lib/gitlab/auth/saml/config_spec.rb b/spec/lib/gitlab/auth/saml/config_spec.rb
index 12f5da48873..d657622c9f2 100644
--- a/spec/lib/gitlab/auth/saml/config_spec.rb
+++ b/spec/lib/gitlab/auth/saml/config_spec.rb
@@ -16,4 +16,30 @@ RSpec.describe Gitlab::Auth::Saml::Config do
it { is_expected.to eq(true) }
end
end
+
+ describe '#external_groups' do
+ let(:config_1) { described_class.new('saml1') }
+
+ let(:config_2) { described_class.new('saml2') }
+
+ before do
+ saml1_config = ActiveSupport::InheritableOptions.new(name: 'saml1', label: 'saml1', args: {
+ 'strategy_class' => 'OmniAuth::Strategies::SAML'
+ })
+
+ saml2_config = ActiveSupport::InheritableOptions.new(name: 'saml2',
+ external_groups: ['FreeLancers'],
+ label: 'saml2',
+ args: {
+ 'strategy_class' => 'OmniAuth::Strategies::SAML'
+ })
+
+ stub_omniauth_setting(enabled: true, auto_link_saml_user: true, providers: [saml1_config, saml2_config])
+ end
+
+ it "lists groups" do
+ expect(config_1.external_groups).to be_nil
+ expect(config_2.external_groups).to be_eql(['FreeLancers'])
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 36c87fb4557..b864dba58de 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -115,26 +115,6 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
end
- context 'with admin_mode_for_api feature flag disabled' do
- before do
- stub_feature_flags(admin_mode_for_api: false)
- end
-
- it 'contains all non-default scopes' do
- expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability]
- end
-
- it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
- user = build_stubbed(:user, admin: true)
-
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo]
- end
-
- it 'optional_scopes contains all non-default scopes' do
- expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability]
- end
- end
-
context 'registry_scopes' do
context 'when registry is disabled' do
before do
diff --git a/spec/lib/gitlab/avatar_cache_spec.rb b/spec/lib/gitlab/avatar_cache_spec.rb
index a57d811edaf..c959c5d80b2 100644
--- a/spec/lib/gitlab/avatar_cache_spec.rb
+++ b/spec/lib/gitlab/avatar_cache_spec.rb
@@ -62,52 +62,54 @@ RSpec.describe Gitlab::AvatarCache, :clean_gitlab_redis_cache do
end
describe "#delete_by_email" do
- shared_examples 'delete emails' do
- subject { described_class.delete_by_email(*emails) }
+ subject { described_class.delete_by_email(*emails) }
- before do
- perform_fetch
- end
+ before do
+ perform_fetch
+ end
- context "no emails, somehow" do
- let(:emails) { [] }
+ context "no emails, somehow" do
+ let(:emails) { [] }
- it { is_expected.to eq(0) }
- end
+ it { is_expected.to eq(0) }
+ end
- context "single email" do
- let(:emails) { "foo@bar.com" }
+ context "single email" do
+ let(:emails) { "foo@bar.com" }
- it "removes the email" do
- expect(read(key, "20:2:true")).to eq(avatar_path)
+ it "removes the email" do
+ expect(read(key, "20:2:true")).to eq(avatar_path)
- expect(subject).to eq(1)
+ expect(subject).to eq(1)
- expect(read(key, "20:2:true")).to eq(nil)
- end
+ expect(read(key, "20:2:true")).to eq(nil)
end
+ end
- context "multiple emails" do
- let(:emails) { ["foo@bar.com", "missing@baz.com"] }
+ context "multiple emails" do
+ let(:emails) { ["foo@bar.com", "missing@baz.com"] }
- it "removes the emails it finds" do
- expect(read(key, "20:2:true")).to eq(avatar_path)
+ it "removes the emails it finds" do
+ expect(read(key, "20:2:true")).to eq(avatar_path)
- expect(subject).to eq(1)
+ expect(subject).to eq(1)
- expect(read(key, "20:2:true")).to eq(nil)
- end
+ expect(read(key, "20:2:true")).to eq(nil)
end
end
- context 'when feature flag disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ context 'when deleting over 1000 emails' do
+ it 'deletes in batches of 1000' do
+ Gitlab::Redis::Cache.with do |redis|
+ if Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:unlink).and_call_original
+ end
+ end
- it_behaves_like 'delete emails'
+ described_class.delete_by_email(*(Array.new(1001) { |i| i }))
+ end
end
-
- it_behaves_like 'delete emails'
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb
deleted file mode 100644
index aaf8c124a83..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb
+++ /dev/null
@@ -1,245 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillCiQueuingTables, :migration,
- :suppress_gitlab_schemas_validate_connection, schema: 20220208115439 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:ci_cd_settings) { table(:project_ci_cd_settings) }
- let(:builds) { table(:ci_builds) }
- let(:queuing_entries) { table(:ci_pending_builds) }
- let(:tags) { table(:tags) }
- let(:taggings) { table(:taggings) }
-
- subject { described_class.new }
-
- describe '#perform' do
- let!(:namespace) do
- namespaces.create!(
- id: 10,
- name: 'namespace10',
- path: 'namespace10',
- traversal_ids: [10])
- end
-
- let!(:other_namespace) do
- namespaces.create!(
- id: 11,
- name: 'namespace11',
- path: 'namespace11',
- traversal_ids: [11])
- end
-
- let!(:project) do
- projects.create!(id: 5, namespace_id: 10, name: 'test1', path: 'test1')
- end
-
- let!(:ci_cd_setting) do
- ci_cd_settings.create!(id: 5, project_id: 5, group_runners_enabled: true)
- end
-
- let!(:other_project) do
- projects.create!(id: 7, namespace_id: 11, name: 'test2', path: 'test2')
- end
-
- let!(:other_ci_cd_setting) do
- ci_cd_settings.create!(id: 7, project_id: 7, group_runners_enabled: false)
- end
-
- let!(:another_project) do
- projects.create!(id: 9, namespace_id: 10, name: 'test3', path: 'test3', shared_runners_enabled: false)
- end
-
- let!(:ruby_tag) do
- tags.create!(id: 22, name: 'ruby')
- end
-
- let!(:postgres_tag) do
- tags.create!(id: 23, name: 'postgres')
- end
-
- it 'creates ci_pending_builds for all pending builds in range' do
- builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 51, status: :created, name: 'test2', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 52, status: :pending, name: 'test3', project_id: 5, protected: true, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 22)
- taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 23)
-
- builds.create!(id: 60, status: :pending, name: 'test1', project_id: 7, type: 'Ci::Build')
- builds.create!(id: 61, status: :running, name: 'test2', project_id: 7, protected: true, type: 'Ci::Build')
- builds.create!(id: 62, status: :pending, name: 'test3', project_id: 7, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 60, taggable_type: 'CommitStatus', tag_id: 23)
- taggings.create!(taggable_id: 62, taggable_type: 'CommitStatus', tag_id: 22)
-
- builds.create!(id: 70, status: :pending, name: 'test1', project_id: 9, protected: true, type: 'Ci::Build')
- builds.create!(id: 71, status: :failed, name: 'test2', project_id: 9, type: 'Ci::Build')
- builds.create!(id: 72, status: :pending, name: 'test3', project_id: 9, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 71, taggable_type: 'CommitStatus', tag_id: 22)
-
- subject.perform(1, 100)
-
- expect(queuing_entries.all).to contain_exactly(
- an_object_having_attributes(
- build_id: 50,
- project_id: 5,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: [10]),
- an_object_having_attributes(
- build_id: 52,
- project_id: 5,
- namespace_id: 10,
- protected: true,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: match_array([22, 23]),
- namespace_traversal_ids: [10]),
- an_object_having_attributes(
- build_id: 60,
- project_id: 7,
- namespace_id: 11,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [23],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 62,
- project_id: 7,
- namespace_id: 11,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [22],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 70,
- project_id: 9,
- namespace_id: 10,
- protected: true,
- instance_runners_enabled: false,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 72,
- project_id: 9,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: false,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: [])
- )
- end
-
- it 'skips builds that already have ci_pending_builds' do
- builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 51, status: :created, name: 'test2', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 52, status: :pending, name: 'test3', project_id: 5, protected: true, type: 'Ci::Build')
-
- taggings.create!(taggable_id: 50, taggable_type: 'CommitStatus', tag_id: 22)
- taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 23)
-
- queuing_entries.create!(build_id: 50, project_id: 5, namespace_id: 10)
-
- subject.perform(1, 100)
-
- expect(queuing_entries.all).to contain_exactly(
- an_object_having_attributes(
- build_id: 50,
- project_id: 5,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: false,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: []),
- an_object_having_attributes(
- build_id: 52,
- project_id: 5,
- namespace_id: 10,
- protected: true,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [23],
- namespace_traversal_ids: [10])
- )
- end
-
- it 'upserts values in case of conflicts' do
- builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- queuing_entries.create!(build_id: 50, project_id: 5, namespace_id: 10)
-
- build = described_class::Ci::Build.find(50)
- described_class::Ci::PendingBuild.upsert_from_build!(build)
-
- expect(queuing_entries.all).to contain_exactly(
- an_object_having_attributes(
- build_id: 50,
- project_id: 5,
- namespace_id: 10,
- protected: false,
- instance_runners_enabled: true,
- minutes_exceeded: false,
- tag_ids: [],
- namespace_traversal_ids: [10])
- )
- end
- end
-
- context 'Ci::Build' do
- describe '.each_batch' do
- let(:model) { described_class::Ci::Build }
-
- before do
- builds.create!(id: 1, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 2, status: :pending, name: 'test2', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 3, status: :pending, name: 'test3', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 4, status: :pending, name: 'test4', project_id: 5, type: 'Ci::Build')
- builds.create!(id: 5, status: :pending, name: 'test5', project_id: 5, type: 'Ci::Build')
- end
-
- it 'yields an ActiveRecord::Relation when a block is given' do
- model.each_batch do |relation|
- expect(relation).to be_a_kind_of(ActiveRecord::Relation)
- end
- end
-
- it 'yields a batch index as the second argument' do
- model.each_batch do |_, index|
- expect(index).to eq(1)
- end
- end
-
- it 'accepts a custom batch size' do
- amount = 0
-
- model.each_batch(of: 1) { amount += 1 }
-
- expect(amount).to eq(5)
- end
-
- it 'does not include ORDER BYs in the yielded relations' do
- model.each_batch do |relation|
- expect(relation.to_sql).not_to include('ORDER BY')
- end
- end
-
- it 'orders ascending' do
- ids = []
-
- model.each_batch(of: 1) { |rel| ids.concat(rel.ids) }
-
- expect(ids).to eq(ids.sort)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb
new file mode 100644
index 00000000000..1cfdf0ab09a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_code_suggestions_namespace_settings_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillCodeSuggestionsNamespaceSettings, schema: 20230518071251, feature_category: :code_suggestions do # rubocop:disable Layout/LineLength
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespace_settings_table) { table(:namespace_settings) }
+
+ let(:group_namespace) { namespaces_table.create!(name: 'Group#1', type: 'Group', path: 'group') }
+ let(:user_namespace) { namespaces_table.create!(name: 'User#1', type: 'User', path: 'user') }
+ let(:project_namespace) { namespaces_table.create!(name: 'Project#1', type: 'Project', path: 'project') }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: namespace_settings_table.minimum(:namespace_id),
+ end_id: namespace_settings_table.maximum(:namespace_id),
+ batch_table: :namespace_settings,
+ batch_column: :namespace_id,
+ sub_batch_size: 3,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ namespace_settings_table.create!(namespace_id: group_namespace.id, code_suggestions: false)
+ namespace_settings_table.create!(namespace_id: user_namespace.id, code_suggestions: true)
+ namespace_settings_table.create!(namespace_id: project_namespace.id, code_suggestions: true)
+ end
+
+ it 'updates the code suggestions values only for group and user namespace', :aggregate_failures do
+ expect { perform_migration }
+ .to change { namespace_settings_table.find_by_namespace_id(group_namespace.id).code_suggestions }.to(true)
+ .and change { namespace_settings_table.find_by_namespace_id(user_namespace.id).code_suggestions }.to(false)
+
+ expect(namespace_settings_table.find_by_namespace_id(project_namespace.id).code_suggestions).to eq(true)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
deleted file mode 100644
index 84611c88806..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests, :migration, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- context "for MRs with #draft? == true titles but draft attribute false" do
- let(:mr_ids) { merge_requests.all.collect(&:id) }
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: false,
- state_id: n
- )
- end
- end
- end
-
- it "updates all eligible draft merge request's draft field to true" do
- mr_count = merge_requests.all.count
-
- expect { subject.perform(mr_ids.first, mr_ids.last) }
- .to change { MergeRequest.where(draft: false).count }
- .from(mr_count).to(mr_count - draft_prefixes.length)
- end
-
- it "marks successful slices as completed" do
- expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last)
-
- subject.perform(mr_ids.first, mr_ids.last)
- end
-
- it_behaves_like 'marks background migration job records' do
- let!(:non_eligible_mrs) do
- Array.new(2) do
- create_merge_request(
- title: "Not a d-r-a-f-t 1",
- draft: false,
- state_id: 1
- )
- end
- end
-
- let(:arguments) { [non_eligible_mrs.first.id, non_eligible_mrs.last.id] }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb
deleted file mode 100644
index e6e10977143..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequestsWithCorrectedRegex,
- :migration, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- context "for MRs with #draft? == true titles but draft attribute false" do
- let(:mr_ids) { merge_requests.all.collect(&:id) }
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: false,
- state_id: n
- )
-
- create_merge_request(
- title: "This is a title with the #{prefix} in a weird spot",
- draft: false,
- state_id: n
- )
- end
- end
- end
-
- it "updates all eligible draft merge request's draft field to true" do
- mr_count = merge_requests.all.count
-
- expect { subject.perform(mr_ids.first, mr_ids.last) }
- .to change { MergeRequest.where(draft: false).count }
- .from(mr_count).to(mr_count - draft_prefixes.length)
- end
-
- it "marks successful slices as completed" do
- expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last)
-
- subject.perform(mr_ids.first, mr_ids.last)
- end
-
- it_behaves_like 'marks background migration job records' do
- let!(:non_eligible_mrs) do
- Array.new(2) do
- create_merge_request(
- title: "Not a d-r-a-f-t 1",
- draft: false,
- state_id: 1
- )
- end
- end
-
- let(:arguments) { [non_eligible_mrs.first.id, non_eligible_mrs.last.id] }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb
deleted file mode 100644
index 023d4b04e63..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillGroupFeatures, :migration, schema: 20220302114046 do
- let(:group_features) { table(:group_features) }
- let(:namespaces) { table(:namespaces) }
-
- subject do
- described_class.new(
- start_id: 1,
- end_id: 4,
- batch_table: :namespaces,
- batch_column: :id,
- sub_batch_size: 10,
- pause_ms: 0,
- job_arguments: [4],
- connection: ActiveRecord::Base.connection
- )
- end
-
- describe '#perform' do
- it 'creates settings for all group namespaces in range' do
- namespaces.create!(id: 1, name: 'group1', path: 'group1', type: 'Group')
- namespaces.create!(id: 2, name: 'user', path: 'user')
- namespaces.create!(id: 3, name: 'group2', path: 'group2', type: 'Group')
-
- # Checking that no error is raised if the group_feature for a group already exists
- namespaces.create!(id: 4, name: 'group3', path: 'group3', type: 'Group')
- group_features.create!(id: 1, group_id: 4)
- expect(group_features.count).to eq 1
-
- expect { subject.perform }.to change { group_features.count }.by(2)
-
- expect(group_features.count).to eq 3
- expect(group_features.all.pluck(:group_id)).to contain_exactly(1, 3, 4)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
deleted file mode 100644
index e6588644b4f..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew, :migration, schema: 20220212120735 do
- let(:migration) { described_class.new }
- let(:integrations) { table(:integrations) }
-
- let(:namespaced_integrations) do
- Set.new(
- %w[
- Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
- Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Harbor Irker Jenkins Jira Mattermost
- MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
- Prometheus Pushover Redmine Shimo Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao
- Github GitlabSlackApplication
- ]).freeze
- end
-
- before do
- integrations.connection.execute 'ALTER TABLE integrations DISABLE TRIGGER "trigger_type_new_on_insert"'
-
- namespaced_integrations.each_with_index do |type, i|
- integrations.create!(id: i + 1, type: "#{type}Service")
- end
-
- integrations.create!(id: namespaced_integrations.size + 1, type: 'LegacyService')
- ensure
- integrations.connection.execute 'ALTER TABLE integrations ENABLE TRIGGER "trigger_type_new_on_insert"'
- end
-
- it 'backfills `type_new` for the selected records' do
- # We don't want to mock `Kernel.sleep`, so instead we mock it on the migration
- # class before it gets forwarded.
- expect(migration).to receive(:sleep).with(0.05).exactly(5).times
-
- queries = ActiveRecord::QueryRecorder.new do
- migration.perform(2, 10, :integrations, :id, 2, 50)
- end
-
- expect(queries.count).to be(16)
- expect(queries.log.grep(/^SELECT/).size).to be(11)
- expect(queries.log.grep(/^UPDATE/).size).to be(5)
- expect(queries.log.grep(/^UPDATE/).join.scan(/WHERE .*/)).to eq(
- [
- 'WHERE integrations.id BETWEEN 2 AND 3',
- 'WHERE integrations.id BETWEEN 4 AND 5',
- 'WHERE integrations.id BETWEEN 6 AND 7',
- 'WHERE integrations.id BETWEEN 8 AND 9',
- 'WHERE integrations.id BETWEEN 10 AND 10'
- ])
-
- expect(integrations.where(id: 2..10).pluck(:type, :type_new)).to contain_exactly(
- ['AssemblaService', 'Integrations::Assembla'],
- ['BambooService', 'Integrations::Bamboo'],
- ['BugzillaService', 'Integrations::Bugzilla'],
- ['BuildkiteService', 'Integrations::Buildkite'],
- ['CampfireService', 'Integrations::Campfire'],
- ['ConfluenceService', 'Integrations::Confluence'],
- ['CustomIssueTrackerService', 'Integrations::CustomIssueTracker'],
- ['DatadogService', 'Integrations::Datadog'],
- ['DiscordService', 'Integrations::Discord']
- )
-
- expect(integrations.where.not(id: 2..10)).to all(have_attributes(type_new: nil))
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
deleted file mode 100644
index f98aea2dda7..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_issue_search_data_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchData, :migration, schema: 20220326161803 do
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:issue_search_data_table) { table(:issue_search_data) }
-
- let!(:namespace) { namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') }
- let!(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id) }
- let!(:issues) { Array.new(10) { table(:issues).create!(project_id: project.id, title: 'test title', description: 'test description') } }
-
- let(:migration) { described_class.new }
-
- before do
- allow(migration).to receive(:sleep)
- end
-
- it 'backfills search data for the specified records' do
- # sleeps for every sub-batch
- expect(migration).to receive(:sleep).with(0.05).exactly(3).times
-
- migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
-
- expect(issue_search_data_table.count).to eq(6)
- end
-
- it 'skips issues that already have search data' do
- old_time = Time.new(2019, 1, 1).in_time_zone
- issue_search_data_table.create!(project_id: project.id, issue_id: issues[0].id, updated_at: old_time)
-
- migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50)
-
- expect(issue_search_data_table.count).to eq(6)
- expect(issue_search_data_table.find_by_issue_id(issues[0].id).updated_at).to be_like_time(old_time)
- end
-
- it 'rescues batch with bad data and inserts other rows' do
- issues[1].update!(description: Array.new(30_000) { SecureRandom.hex }.join(' '))
-
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
- expect(logger).to receive(:error).with(a_hash_including(message: /string is too long for tsvector/, model_id: issues[1].id))
- end
-
- expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.not_to raise_error
-
- expect(issue_search_data_table.count).to eq(5)
- expect(issue_search_data_table.find_by_issue_id(issues[1].id)).to eq(nil)
- end
-
- it 're-raises other errors' do
- allow(migration).to receive(:update_search_data).and_raise(ActiveRecord::StatementTimeout)
-
- expect { migration.perform(issues[0].id, issues[5].id, :issues, :id, 2, 50) }.to raise_error(ActiveRecord::StatementTimeout)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
deleted file mode 100644
index e1ef12a1479..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillMemberNamespaceForGroupMembers, :migration, schema: 20220120211832 do
- let(:migration) { described_class.new }
- let(:members_table) { table(:members) }
- let(:namespaces_table) { table(:namespaces) }
-
- let(:table_name) { 'members' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 100 }
- let(:pause_ms) { 0 }
-
- subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- namespaces_table.create!(id: 100, name: 'test1', path: 'test1', type: 'Group')
- namespaces_table.create!(id: 101, name: 'test2', path: 'test2', type: 'Group')
- namespaces_table.create!(id: 102, name: 'test3', path: 'test3', type: 'Group')
- namespaces_table.create!(id: 201, name: 'test4', path: 'test4', type: 'Project')
-
- members_table.create!(id: 1, source_id: 100, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
- members_table.create!(id: 2, source_id: 101, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
- members_table.create!(id: 3, source_id: 102, source_type: 'Namespace', type: 'GroupMember', member_namespace_id: 102, access_level: 10, notification_level: 3)
- members_table.create!(id: 4, source_id: 103, source_type: 'Project', type: 'ProjectMember', member_namespace_id: nil, access_level: 10, notification_level: 3)
- members_table.create!(id: 5, source_id: 104, source_type: 'Project', type: 'ProjectMember', member_namespace_id: 201, access_level: 10, notification_level: 3)
- end
-
- it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 2
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(3)
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 0
- expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([100, 101, 102])
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1
- expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 201])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb
deleted file mode 100644
index 3a8a327550b..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForNamespaceRoute, :migration, schema: 20220120123800 do
- let(:migration) { described_class.new }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:routes_table) { table(:routes) }
-
- let(:table_name) { 'routes' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 200 }
- let(:pause_ms) { 0 }
-
- let(:namespace1) { namespaces_table.create!(name: 'namespace1', path: 'namespace1', type: 'User') }
- let(:namespace2) { namespaces_table.create!(name: 'namespace2', path: 'namespace2', type: 'Group') }
- let(:namespace3) { namespaces_table.create!(name: 'namespace3', path: 'namespace3', type: 'Group') }
- let(:namespace4) { namespaces_table.create!(name: 'namespace4', path: 'namespace4', type: 'Group') }
- let(:project1) { projects_table.create!(name: 'project1', namespace_id: namespace1.id) }
-
- subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- routes_table.create!(
- id: 1, name: 'test1', path: 'test1', source_id: namespace1.id, source_type: namespace1.class.sti_name
- )
-
- routes_table.create!(
- id: 2, name: 'test2', path: 'test2', source_id: namespace2.id, source_type: namespace2.class.sti_name
- )
-
- routes_table.create!(
- id: 5, name: 'test3', path: 'test3', source_id: project1.id, source_type: project1.class.sti_name
- ) # should be ignored - project route
-
- routes_table.create!(
- id: 6, name: 'test4', path: 'test4', source_id: non_existing_record_id, source_type: namespace3.class.sti_name
- ) # should be ignored - invalid source_id
-
- routes_table.create!(
- id: 10, name: 'test5', path: 'test5', source_id: namespace3.id, source_type: namespace3.class.sti_name
- )
-
- routes_table.create!(
- id: 11, name: 'test6', path: 'test6', source_id: namespace4.id, source_type: namespace4.class.sti_name
- ) # should be ignored - outside the scope
- end
-
- it 'backfills `type` for the selected records', :aggregate_failures do
- perform_migration
-
- expect(routes_table.where.not(namespace_id: nil).pluck(:id)).to match_array([1, 2, 10])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
deleted file mode 100644
index 525c236b644..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectSettings, :migration, schema: 20220324165436 do
- let(:migration) { described_class.new }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:project_settings_table) { table(:project_settings) }
-
- let(:table_name) { 'projects' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 2 }
- let(:pause_ms) { 0 }
-
- subject(:perform_migration) { migration.perform(1, 30, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- namespaces_table.create!(id: 1, name: 'namespace', path: 'namespace-path', type: 'Group')
- projects_table.create!(id: 11, name: 'group-project-1', path: 'group-project-path-1', namespace_id: 1)
- projects_table.create!(id: 12, name: 'group-project-2', path: 'group-project-path-2', namespace_id: 1)
- project_settings_table.create!(project_id: 11)
-
- namespaces_table.create!(id: 2, name: 'namespace', path: 'namespace-path', type: 'User')
- projects_table.create!(id: 21, name: 'user-project-1', path: 'user--project-path-1', namespace_id: 2)
- projects_table.create!(id: 22, name: 'user-project-2', path: 'user-project-path-2', namespace_id: 2)
- project_settings_table.create!(project_id: 21)
- end
-
- it 'backfills project settings when it does not exist', :aggregate_failures do
- expect(project_settings_table.count).to eq 2
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(5)
-
- expect(project_settings_table.count).to eq 4
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb b/spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb
new file mode 100644
index 00000000000..4b8495cc004
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillResourceLinkEvents, schema: 20230426085615, feature_category: :team_planning do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:notes) { table(:notes) }
+ let(:system_note_metadata) { table(:system_note_metadata) }
+ let(:resource_link_events) { table(:resource_link_events) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:work_item_issue_type_id) { table(:work_item_types).find_by(namespace_id: nil, name: 'Issue').id }
+ let(:work_item_task_type_id) { table(:work_item_types).find_by(namespace_id: nil, name: 'Task').id }
+
+ # rubocop:disable Layout/LineLength
+ let!(:namespace) { namespaces.create!(name: "namespace", path: "namespace") }
+ let!(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let!(:issue) { issues.create!(iid: 100, project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: work_item_issue_type_id) }
+ let!(:work_item) { issues.create!(iid: 200, project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: work_item_task_type_id) }
+ let!(:user) { users.create!(name: 'user', projects_limit: 10) }
+
+ # Given a system note generated for a child work item, "Added #100 as parent issue",
+ # the migration searches for the parent issue with iid #100 using the child work item's project scope.
+  # Creating another issue that has the identical iid under another project ensures the migration is picking up the correct issue.
+ let!(:other_namespace) { namespaces.create!(name: "other_namespace", path: "other_namespace") }
+ let!(:other_project) { projects.create!(namespace_id: other_namespace.id, project_namespace_id: other_namespace.id) }
+ let!(:other_issue) { issues.create!(iid: issue.iid, project_id: other_project.id, namespace_id: other_project.project_namespace_id, work_item_type_id: work_item_issue_type_id) }
+ let!(:other_work_item) { issues.create!(iid: 200, project_id: other_project.id, namespace_id: other_project.project_namespace_id, work_item_type_id: work_item_task_type_id) }
+ # rubocop:enable Layout/LineLength
+
+ subject(:migration) do
+ described_class.new(
+ start_id: system_note_metadata.minimum(:id),
+ end_id: system_note_metadata.maximum(:id),
+ batch_table: :system_note_metadata,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe '#perform' do
+ it 'does nothing when relevant notes do not exist' do
+ expect { migration.perform }
+ .to not_change { resource_link_events.count }
+ end
+
+ shared_examples 'a resource_link_event is correctly created' do
+ it "correctly backfills a resource_link_event record", :aggregate_failures do
+ expect { migration.perform }
+ .to change { resource_link_events.count }.from(0).to(1)
+
+ expect(resource_link_events.last.attributes).to match(a_hash_including(expected_attributes))
+ expect(resource_link_events.last.created_at).to be_like_time(system_note.created_at)
+ end
+ end
+
+ context "for 'relate_to_parent' system_note_metadata record" do
+ let!(:system_note) do
+ create_relate_to_parent_note(parent: issue, child: work_item, issue_type_name: issue_type_name)
+ end
+
+ let(:expected_attributes) do
+ {
+ "action" => described_class::ResourceLinkEvent.actions[:add],
+ "user_id" => user.id,
+ "issue_id" => issue.id,
+ "child_work_item_id" => work_item.id,
+ "system_note_metadata_id" => system_note.id
+ }
+ end
+
+ context 'when issue_type_name is `issue`' do
+ let(:issue_type_name) { 'issue' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+
+ context "when issue_type_name is not `issue`" do
+ let(:issue_type_name) { 'objective' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+ end
+
+ context "for 'unrelate_to_parent' system_note_metadata record" do
+ let!(:system_note) do
+ create_unrelate_from_parent_note(parent: issue, child: work_item, issue_type_name: issue_type_name)
+ end
+
+ let(:expected_attributes) do
+ {
+ "action" => described_class::ResourceLinkEvent.actions[:remove],
+ "user_id" => user.id,
+ "issue_id" => issue.id,
+ "child_work_item_id" => work_item.id,
+ "system_note_metadata_id" => system_note.id
+ }
+ end
+
+ context 'when issue_type_name is `issue`' do
+ let(:issue_type_name) { 'issue' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+
+ context "when issue_type_name is not `issue`" do
+ let(:issue_type_name) { 'objective' }
+
+ it_behaves_like 'a resource_link_event is correctly created'
+ end
+ end
+
+ context "when a backfilled note exists" do
+ let!(:backfilled_system_note) do
+ create_relate_to_parent_note(parent: other_issue, child: other_work_item, issue_type_name: 'issue')
+ end
+
+ let!(:backfilled_resource_link_event) do
+ resource_link_events.create!(
+ action: described_class::ResourceLinkEvent.actions[:add],
+ user_id: user.id,
+ issue_id: other_issue.id,
+ child_work_item_id: other_work_item.id,
+ created_at: backfilled_system_note.created_at,
+ system_note_metadata_id: backfilled_system_note.id)
+ end
+
+ before do
+ # Create two system notes for which resource_link_events should be created (backfilled)
+ create_relate_to_parent_note(parent: issue, child: work_item, issue_type_name: 'issue')
+ create_unrelate_from_parent_note(parent: issue, child: work_item, issue_type_name: 'objective')
+
+ # A backfilled resource_link_event exists for `backfilled_system_note`
+ # No resource_link_event record should be created for `backfilled_system_note`
+ # To test, update `backfilled_system_note` and check `backfilled_resource_link_event` does not change
+ backfilled_system_note.update!(created_at: 1.week.ago)
+ end
+
+ it "correctly backfills the system notes without those that have been backfilled" do
+ expect { migration.perform }
+ .to change { resource_link_events.count }.from(1).to(3)
+ .and not_change { backfilled_resource_link_event }
+ end
+ end
+
+ context 'with unexpected note content' do
+ context 'when note iid is prefixed' do
+ before do
+ note = notes.create!(
+ noteable_type: 'Issue',
+ noteable_id: work_item.id,
+ author_id: user.id,
+ # Cross-project linking is not supported currently.
+ # When an issue is referenced not in its own project,
+ # the iid is prefixed by the project name like gitlab#1
+ # Test the scenario to ensure no resource_link_event is wrongly created.
+ note: "added gitlab##{issue.iid} as parent issue"
+ )
+
+ system_note_metadata.create!(action: 'relate_to_parent', note_id: note.id)
+ end
+
+ it 'does not create resource_link_events record' do
+ expect { migration.perform }
+ .to not_change { resource_link_events.count }
+ end
+ end
+ end
+ end
+
+ def create_relate_to_parent_note(parent:, child:, issue_type_name:)
+ note = notes.create!(
+ noteable_type: 'Issue',
+ noteable_id: child.id,
+ author_id: user.id,
+ note: "added ##{parent.iid} as parent #{issue_type_name}"
+ )
+
+ system_note_metadata.create!(action: 'relate_to_parent', note_id: note.id)
+ end
+
+ def create_unrelate_from_parent_note(parent:, child:, issue_type_name:)
+ note = notes.create!(
+ noteable_type: 'Issue',
+ noteable_id: child.id,
+ author_id: user.id,
+ note: "removed parent #{issue_type_name} ##{parent.iid}"
+ )
+
+ system_note_metadata.create!(action: 'unrelate_from_parent', note_id: note.id)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb b/spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb
new file mode 100644
index 00000000000..a464f89ee69
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb
@@ -0,0 +1,302 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillRootStorageStatisticsForkStorageSizes, schema: 20230517163300, feature_category: :consumables_cost_management do # rubocop:disable Layout/LineLength
+ describe '#perform' do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:root_storage_statistics_table) { table(:namespace_root_storage_statistics) }
+ let(:projects_table) { table(:projects) }
+ let(:project_statistics_table) { table(:project_statistics) }
+ let(:fork_networks_table) { table(:fork_networks) }
+ let(:fork_network_members_table) { table(:fork_network_members) }
+
+ it 'updates the public_forks_storage_size' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, storage_size: 100)
+
+ migrate
+
+ expect(root_storage_statistics.reload.public_forks_storage_size).to eq(100)
+ end
+
+ it 'totals the size of public forks in the namespace' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, name: 'my fork', storage_size: 100)
+ create_fork!(project, name: 'my other fork', storage_size: 100)
+
+ migrate
+
+ expect(root_storage_statistics.reload.public_forks_storage_size).to eq(200)
+ end
+
+ it 'updates the internal_forks_storage_size' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, storage_size: 250)
+
+ migrate
+
+ expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(250)
+ end
+
+ it 'totals the size of internal forks in the namespace' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, name: 'my fork', storage_size: 300)
+ create_fork!(project, name: 'my other fork', storage_size: 300)
+
+ migrate
+
+ expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(600)
+ end
+
+ it 'updates the private_forks_storage_size' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ create_fork!(project, storage_size: 50)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(50)
+ end
+
+ it 'totals the size of private forks in the namespace' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ create_fork!(project, name: 'my fork', storage_size: 350)
+ create_fork!(project, name: 'my other fork', storage_size: 400)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(750)
+ end
+
+ it 'counts only the size of forks' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, storage_size: 100,
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, name: 'my public fork', storage_size: 150,
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, name: 'my internal fork', storage_size: 250,
+ visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, name: 'my private fork', storage_size: 350,
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(150)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(250)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(350)
+ end
+
+ it 'sums forks for multiple namespaces' do
+ namespace_a, root_storage_statistics_a = create_namespace!
+ namespace_b, root_storage_statistics_b = create_namespace!
+ project = create_project!(namespace: namespace_a)
+ create_fork!(project, namespace: namespace_a, storage_size: 100)
+ create_fork!(project, namespace: namespace_b, storage_size: 200)
+
+ migrate
+
+ expect(root_storage_statistics_a.reload.private_forks_storage_size).to eq(100)
+ expect(root_storage_statistics_b.reload.private_forks_storage_size).to eq(200)
+ end
+
+ it 'counts the size of forks in subgroups' do
+ group, root_storage_statistics = create_group!
+ subgroup = create_group!(parent: group)
+ project = create_project!(namespace: group, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: subgroup, name: 'my fork A',
+ storage_size: 123, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: subgroup, name: 'my fork B',
+ storage_size: 456, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, namespace: subgroup, name: 'my fork C',
+ storage_size: 789, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(123)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(456)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(789)
+ end
+
+ it 'counts the size of forks in more nested subgroups' do
+ root, root_storage_statistics = create_group!
+ child = create_group!(parent: root)
+ grand_child = create_group!(parent: child)
+ great_grand_child = create_group!(parent: grand_child)
+ project = create_project!(namespace: root, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: grand_child, name: 'my fork A',
+ storage_size: 200, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, namespace: great_grand_child, name: 'my fork B',
+ storage_size: 300, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, namespace: great_grand_child, name: 'my fork C',
+ storage_size: 400, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(200)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(300)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(400)
+ end
+
+ it 'counts forks of forks' do
+ group, root_storage_statistics = create_group!
+ other_group, other_root_storage_statistics = create_group!
+ project = create_project!(namespace: group)
+ fork_a = create_fork!(project, namespace: group, storage_size: 100)
+ fork_b = create_fork!(fork_a, name: 'my other fork', namespace: group, storage_size: 50)
+ create_fork!(fork_b, namespace: other_group, storage_size: 27)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(150)
+ expect(other_root_storage_statistics.reload.private_forks_storage_size).to eq(27)
+ end
+
+ it 'counts multiple forks of the same project' do
+ group, root_storage_statistics = create_group!
+ project = create_project!(namespace: group)
+ create_fork!(project, storage_size: 200)
+ create_fork!(project, name: 'my other fork', storage_size: 88)
+
+ migrate
+
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(288)
+ end
+
+ it 'updates a namespace with no forks' do
+ namespace, root_storage_statistics = create_namespace!
+ create_project!(namespace: namespace)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(0)
+ end
+
+ it 'skips the update if the public_forks_storage_size has already been set' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ create_fork!(project, storage_size: 200)
+ root_storage_statistics.update!(public_forks_storage_size: 100)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(100)
+ end
+
+ it 'skips the update if the internal_forks_storage_size has already been set' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ create_fork!(project, storage_size: 200)
+ root_storage_statistics.update!(internal_forks_storage_size: 100)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(100)
+ end
+
+ it 'skips the update if the private_forks_storage_size has already been set' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ create_fork!(project, storage_size: 200)
+ root_storage_statistics.update!(private_forks_storage_size: 100)
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.private_forks_storage_size).to eq(100)
+ end
+
+ it 'skips the update if the namespace is not found' do
+ namespace, root_storage_statistics = create_namespace!
+ project = create_project!(namespace: namespace)
+ create_fork!(project, storage_size: 100)
+ allow(::ApplicationRecord.connection).to receive(:execute)
+ .with("SELECT type FROM namespaces WHERE id = #{namespace.id}")
+ .and_return([])
+
+ migrate
+
+ root_storage_statistics.reload
+ expect(root_storage_statistics.public_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.internal_forks_storage_size).to eq(0)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(0)
+ end
+ end
+
+ def create_namespace!(name: 'abc', path: 'abc')
+ namespace = namespaces_table.create!(name: name, path: path)
+ namespace.update!(traversal_ids: [namespace.id])
+ root_storage_statistics = root_storage_statistics_table.create!(namespace_id: namespace.id)
+
+ [namespace, root_storage_statistics]
+ end
+
+ def create_group!(name: 'abc', path: 'abc', parent: nil)
+ parent_id = parent.try(:id)
+ group = namespaces_table.create!(name: name, path: path, type: 'Group', parent_id: parent_id)
+
+ if parent_id
+ parent_traversal_ids = namespaces_table.find(parent_id).traversal_ids
+ group.update!(traversal_ids: parent_traversal_ids + [group.id])
+ group
+ else
+ group.update!(traversal_ids: [group.id])
+ root_storage_statistics = root_storage_statistics_table.create!(namespace_id: group.id)
+ [group, root_storage_statistics]
+ end
+ end
+
+ def create_project!(
+ namespace:, storage_size: 100, name: 'my project',
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project_namespace = namespaces_table.create!(name: name, path: name)
+ project = projects_table.create!(name: name, namespace_id: namespace.id, project_namespace_id: project_namespace.id,
+ visibility_level: visibility_level)
+ project_statistics_table.create!(project_id: project.id, namespace_id: project.namespace_id,
+ storage_size: storage_size)
+
+ project
+ end
+
+ def create_fork!(project, storage_size:, name: 'my fork', visibility_level: nil, namespace: nil)
+ fork_namespace = namespace || namespaces_table.find(project.namespace_id)
+ fork_visibility_level = visibility_level || project.visibility_level
+
+ project_fork = create_project!(name: name, namespace: fork_namespace,
+ visibility_level: fork_visibility_level, storage_size: storage_size)
+
+ fork_network_id = if membership = fork_network_members_table.find_by(project_id: project.id)
+ membership.fork_network_id
+ else
+ fork_network = fork_networks_table.create!(root_project_id: project.id)
+ fork_network_members_table.create!(fork_network_id: fork_network.id, project_id: project.id)
+ fork_network.id
+ end
+
+ fork_network_members_table.create!(fork_network_id: fork_network_id, project_id: project_fork.id,
+ forked_from_project_id: project.id)
+
+ project_fork
+ end
+
+ def migrate
+ described_class.new(start_id: 1, end_id: root_storage_statistics_table.last.id,
+ batch_table: 'namespace_root_storage_statistics',
+ batch_column: 'namespace_id',
+ sub_batch_size: 100, pause_ms: 0,
+ connection: ApplicationRecord.connection).perform
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index d8874cb811b..9f76e4131b2 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -107,7 +107,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
last_commit = raw_repository(snippet).commit
aggregate_failures do
- expect(blob).to be
+ expect(blob).to be_present
expect(blob.data).to eq content
expect(last_commit.author_name).to eq user.name
expect(last_commit.author_email).to eq user.email
@@ -225,13 +225,13 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
it 'converts invalid filenames' do
subject
- expect(blob_at(snippet_with_invalid_path, converted_file_name)).to be
+ expect(blob_at(snippet_with_invalid_path, converted_file_name)).to be_present
end
it 'does not convert valid filenames on subsequent migrations' do
subject
- expect(blob_at(snippet_with_valid_path, file_name)).to be
+ expect(blob_at(snippet_with_valid_path, file_name)).to be_present
end
end
end
@@ -293,8 +293,8 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
it 'updates the file_name only when it is invalid' do
subject
- expect(blob_at(invalid_snippet, 'snippetfile1.txt')).to be
- expect(blob_at(snippet, file_name)).to be
+ expect(blob_at(invalid_snippet, 'snippetfile1.txt')).to be_present
+ expect(blob_at(snippet, file_name)).to be_present
end
it_behaves_like 'migration_bot user commits files' do
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
deleted file mode 100644
index dc0935efa94..00000000000
--- a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectNamespacePerGroupBatchingStrategy, '#next_batch', :migration, schema: 20220326161803 do
- let!(:namespaces) { table(:namespaces) }
- let!(:projects) { table(:projects) }
- let!(:background_migrations) { table(:batched_background_migrations) }
-
- let!(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'batch-test1') }
- let!(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'batch-test2') }
- let!(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'batch-test3') }
-
- let!(:project1) { projects.create!(name: 'project1', path: 'project1', namespace_id: namespace1.id, visibility_level: 20) }
- let!(:project2) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, visibility_level: 20) }
- let!(:project3) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, visibility_level: 20) }
- let!(:project4) { projects.create!(name: 'project4', path: 'project4', namespace_id: namespace3.id, visibility_level: 20) }
- let!(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
-
- let(:job_arguments) { [namespace1.id, 'up'] }
-
- context 'when starting on the first batch' do
- it 'returns the bounds of the next batch' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project1.id, batch_size: 3, job_arguments: job_arguments)
-
- expect(batch_bounds).to match_array([project1.id, project3.id])
- end
- end
-
- context 'when additional batches remain' do
- it 'returns the bounds of the next batch' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project2.id, batch_size: 3, job_arguments: job_arguments)
-
- expect(batch_bounds).to match_array([project2.id, project4.id])
- end
- end
-
- context 'when on the final batch' do
- it 'returns the bounds of the next batch' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project4.id, batch_size: 3, job_arguments: job_arguments)
-
- expect(batch_bounds).to match_array([project4.id, project4.id])
- end
- end
-
- context 'when no additional batches remain' do
- it 'returns nil' do
- batch_bounds = batching_strategy.next_batch(:projects, :id, batch_min_value: project4.id + 1, batch_size: 1, job_arguments: job_arguments)
-
- expect(batch_bounds).to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb b/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb
deleted file mode 100644
index d1ef7ca2188..00000000000
--- a/spec/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::CleanupDraftDataFromFaultyRegex, :migration, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:merge_requests) { table(:merge_requests) }
-
- let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
- let(:project) { projects.create!(namespace_id: group.id) }
-
- let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
-
- def create_merge_request(params)
- common_params = {
- target_project_id: project.id,
- target_branch: 'feature1',
- source_branch: 'master'
- }
-
- merge_requests.create!(common_params.merge(params))
- end
-
- context "mr.draft == true, and title matches the leaky regex and not the corrected regex" do
- let(:mr_ids) { merge_requests.all.collect(&:id) }
-
- before do
- draft_prefixes.each do |prefix|
- (1..4).each do |n|
- create_merge_request(
- title: "#{prefix} This is a title",
- draft: true,
- state_id: 1
- )
- end
- end
-
- create_merge_request(title: "This has draft in the title", draft: true, state_id: 1)
- end
-
- it "updates all open draft merge request's draft field to true" do
- expect { subject.perform(mr_ids.first, mr_ids.last) }
- .to change { MergeRequest.where(draft: true).count }
- .by(-1)
- end
-
- it "marks successful slices as completed" do
- expect(subject).to receive(:mark_job_as_succeeded).with(mr_ids.first, mr_ids.last)
-
- subject.perform(mr_ids.first, mr_ids.last)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
index b92f1a74551..a153507837c 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForProjectsLessThanFiveMb,
:migration,
schema: 20221018095434,
- feature_category: :projects do
+ feature_category: :groups_and_projects do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
.perform
end
- it 'sets `legacy_open_source_license_available` to false only for projects less than 5 MB', :aggregate_failures do
+ it 'sets `legacy_open_source_license_available` to false only for projects less than 5 MiB', :aggregate_failures do
project_setting_2_mb = create_legacy_license_project_setting(repo_size: 2)
project_setting_4_mb = create_legacy_license_project_setting(repo_size: 4)
project_setting_5_mb = create_legacy_license_project_setting(repo_size: 5)
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
private
- # @param repo_size: Repo size in MB
+ # @param repo_size: Repo size in MiB
def create_legacy_license_project_setting(repo_size:)
path = "path-for-repo-size-#{repo_size}"
namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
index 205350f9df4..2e6bc2f77ae 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
.perform
end
- it 'sets `legacy_open_source_license_available` to false only for projects less than 1 MB',
+ it 'sets `legacy_open_source_license_available` to false only for projects less than 1 MiB',
:aggregate_failures do
project_setting_1_mb = create_legacy_license_project_setting(repo_size: 1)
project_setting_2_mb = create_legacy_license_project_setting(repo_size: 2)
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForPro
private
- # @param repo_size: Repo size in MB
+ # @param repo_size: Repo size in MiB
def create_legacy_license_project_setting(repo_size:)
path = "path-for-repo-size-#{repo_size}"
namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
diff --git a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
deleted file mode 100644
index c788b701d79..00000000000
--- a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties, schema: 20220415124804 do
- let(:integrations) do
- table(:integrations) do |integrations|
- integrations.send :attr_encrypted, :encrypted_properties_tmp,
- attribute: :encrypted_properties,
- mode: :per_attribute_iv,
- key: ::Settings.attr_encrypted_db_key_base_32,
- algorithm: 'aes-256-gcm',
- marshal: true,
- marshaler: ::Gitlab::Json,
- encode: false,
- encode_iv: false
- end
- end
-
- let!(:no_properties) { integrations.create! }
- let!(:with_plaintext_1) { integrations.create!(properties: json_props(1)) }
- let!(:with_plaintext_2) { integrations.create!(properties: json_props(2)) }
- let!(:with_encrypted) do
- x = integrations.new
- x.properties = nil
- x.encrypted_properties_tmp = some_props(3)
- x.save!
- x
- end
-
- let(:start_id) { integrations.minimum(:id) }
- let(:end_id) { integrations.maximum(:id) }
-
- it 'ensures all properties are encrypted', :aggregate_failures do
- described_class.new.perform(start_id, end_id)
-
- props = integrations.all.to_h do |record|
- [record.id, [Gitlab::Json.parse(record.properties), record.encrypted_properties_tmp]]
- end
-
- expect(integrations.count).to eq(4)
-
- expect(props).to match(
- no_properties.id => both(be_nil),
- with_plaintext_1.id => both(eq some_props(1)),
- with_plaintext_2.id => both(eq some_props(2)),
- with_encrypted.id => match([be_nil, eq(some_props(3))])
- )
- end
-
- private
-
- def both(obj)
- match [obj, obj]
- end
-
- def some_props(id)
- HashWithIndifferentAccess.new({ id: id, foo: 1, bar: true, baz: %w[a string array] })
- end
-
- def json_props(id)
- some_props(id).to_json
- end
-end
diff --git a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb
deleted file mode 100644
index 4e7b97d33f6..00000000000
--- a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::EncryptStaticObjectToken do
- let(:users) { table(:users) }
- let!(:user_without_tokens) { create_user!(name: 'notoken') }
- let!(:user_with_plaintext_token_1) { create_user!(name: 'plaintext_1', token: 'token') }
- let!(:user_with_plaintext_token_2) { create_user!(name: 'plaintext_2', token: 'TOKEN') }
- let!(:user_with_plaintext_empty_token) { create_user!(name: 'plaintext_3', token: '') }
- let!(:user_with_encrypted_token) { create_user!(name: 'encrypted', encrypted_token: 'encrypted') }
- let!(:user_with_both_tokens) { create_user!(name: 'both', token: 'token2', encrypted_token: 'encrypted2') }
-
- before do
- allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).and_call_original
- allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('token') { 'secure_token' }
- allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('TOKEN') { 'SECURE_TOKEN' }
- end
-
- subject { described_class.new.perform(start_id, end_id) }
-
- let(:start_id) { users.minimum(:id) }
- let(:end_id) { users.maximum(:id) }
-
- it 'backfills encrypted tokens to users with plaintext token only', :aggregate_failures do
- subject
-
- new_state = users.pluck(:id, :static_object_token, :static_object_token_encrypted).to_h do |row|
- [row[0], [row[1], row[2]]]
- end
-
- expect(new_state.count).to eq(6)
-
- expect(new_state[user_with_plaintext_token_1.id]).to match_array(%w[token secure_token])
- expect(new_state[user_with_plaintext_token_2.id]).to match_array(%w[TOKEN SECURE_TOKEN])
-
- expect(new_state[user_with_plaintext_empty_token.id]).to match_array(['', nil])
- expect(new_state[user_without_tokens.id]).to match_array([nil, nil])
- expect(new_state[user_with_both_tokens.id]).to match_array(%w[token2 encrypted2])
- expect(new_state[user_with_encrypted_token.id]).to match_array([nil, 'encrypted'])
- end
-
- context 'when id range does not include existing user ids' do
- let(:arguments) { [non_existing_record_id, non_existing_record_id.succ] }
-
- it_behaves_like 'marks background migration job records' do
- subject { described_class.new }
- end
- end
-
- private
-
- def create_user!(name:, token: nil, encrypted_token: nil)
- email = "#{name}@example.com"
-
- table(:users).create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- static_object_token: token,
- static_object_token_encrypted: encrypted_token
- )
- end
-end
diff --git a/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb b/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb
deleted file mode 100644
index 65663d26f37..00000000000
--- a/spec/lib/gitlab/background_migration/fix_duplicate_project_name_and_path_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixDuplicateProjectNameAndPath, :migration, schema: 20220325155953 do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:routes) { table(:routes) }
-
- let(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'batch-test1') }
- let(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'batch-test2') }
- let(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'batch-test3') }
-
- let(:project_namespace2) { namespaces.create!(name: 'project2', path: 'project2', type: 'Project', parent_id: namespace2.id, visibility_level: 20) }
- let(:project_namespace3) { namespaces.create!(name: 'project3', path: 'project3', type: 'Project', parent_id: namespace3.id, visibility_level: 20) }
-
- let(:project1) { projects.create!(name: 'project1', path: 'project1', namespace_id: namespace1.id, visibility_level: 20) }
- let(:project2) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, project_namespace_id: project_namespace2.id, visibility_level: 20) }
- let(:project2_dup) { projects.create!(name: 'project2', path: 'project2', namespace_id: namespace2.id, visibility_level: 20) }
- let(:project3) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, project_namespace_id: project_namespace3.id, visibility_level: 20) }
- let(:project3_dup) { projects.create!(name: 'project3', path: 'project3', namespace_id: namespace3.id, visibility_level: 20) }
-
- let!(:namespace_route1) { routes.create!(path: 'batch-test1', source_id: namespace1.id, source_type: 'Namespace') }
- let!(:namespace_route2) { routes.create!(path: 'batch-test1/batch-test2', source_id: namespace2.id, source_type: 'Namespace') }
- let!(:namespace_route3) { routes.create!(path: 'batch-test1/batch-test3', source_id: namespace3.id, source_type: 'Namespace') }
-
- let!(:proj_route1) { routes.create!(path: 'batch-test1/project1', source_id: project1.id, source_type: 'Project') }
- let!(:proj_route2) { routes.create!(path: 'batch-test1/batch-test2/project2', source_id: project2.id, source_type: 'Project') }
- let!(:proj_route2_dup) { routes.create!(path: "batch-test1/batch-test2/project2-route-#{project2_dup.id}", source_id: project2_dup.id, source_type: 'Project') }
- let!(:proj_route3) { routes.create!(path: 'batch-test1/batch-test3/project3', source_id: project3.id, source_type: 'Project') }
- let!(:proj_route3_dup) { routes.create!(path: "batch-test1/batch-test3/project3-route-#{project3_dup.id}", source_id: project3_dup.id, source_type: 'Project') }
-
- subject(:perform_migration) { migration.perform(projects.minimum(:id), projects.maximum(:id)) }
-
- describe '#up' do
- it 'backfills namespace_id for the selected records', :aggregate_failures do
- expect(namespaces.where(type: 'Project').count).to eq(2)
-
- perform_migration
-
- expect(namespaces.where(type: 'Project').count).to eq(5)
-
- expect(project1.reload.name).to eq("project1-#{project1.id}")
- expect(project1.path).to eq('project1')
-
- expect(project2.reload.name).to eq('project2')
- expect(project2.path).to eq('project2')
-
- expect(project2_dup.reload.name).to eq("project2-#{project2_dup.id}")
- expect(project2_dup.path).to eq("project2-route-#{project2_dup.id}")
-
- expect(project3.reload.name).to eq("project3")
- expect(project3.path).to eq("project3")
-
- expect(project3_dup.reload.name).to eq("project3-#{project3_dup.id}")
- expect(project3_dup.path).to eq("project3-route-#{project3_dup.id}")
-
- projects.all.each do |pr|
- project_namespace = namespaces.find(pr.project_namespace_id)
- expect(project_namespace).to be_in_sync_with_project(pr)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb b/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb
deleted file mode 100644
index af551861d47..00000000000
--- a/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb
+++ /dev/null
@@ -1,232 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixVulnerabilityOccurrencesWithHashesAsRawMetadata, schema: 20211209203821 do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:scanners) { table(:vulnerability_scanners) }
- let(:identifiers) { table(:vulnerability_identifiers) }
- let(:findings) { table(:vulnerability_occurrences) }
-
- let(:user) { users.create!(name: 'Test User', projects_limit: 10, username: 'test-user', email: '1') }
-
- let(:namespace) do
- namespaces.create!(
- owner_id: user.id,
- name: user.name,
- path: user.username
- )
- end
-
- let(:project) do
- projects.create!(namespace_id: namespace.id, name: 'Test Project')
- end
-
- let(:scanner) do
- scanners.create!(
- project_id: project.id,
- external_id: 'test-scanner',
- name: 'Test Scanner',
- vendor: 'GitLab'
- )
- end
-
- let(:primary_identifier) do
- identifiers.create!(
- project_id: project.id,
- external_type: 'cve',
- name: 'CVE-2021-1234',
- external_id: 'CVE-2021-1234',
- fingerprint: '4c0fe491999f94701ee437588554ef56322ae276'
- )
- end
-
- let(:finding) do
- findings.create!(
- raw_metadata: raw_metadata,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: primary_identifier.id,
- uuid: '4deb090a-bedf-5ccc-aa9a-ac8055a1ea81',
- project_fingerprint: '1caa750a6dad769a18ad6f40b413b3b6ab1c8d77',
- location_fingerprint: '6d1f35f53b065238abfcadc01336ce65d112a2bd',
- name: 'name',
- report_type: 7,
- severity: 0,
- confidence: 0,
- detection_method: 'gitlab_security_report',
- metadata_version: 'cluster_image_scanning:1.0',
- created_at: "2021-12-10 14:27:42 -0600",
- updated_at: "2021-12-10 14:27:42 -0600"
- )
- end
-
- subject(:perform) { described_class.new.perform(finding.id, finding.id) }
-
- context 'with stringified hash as raw_metadata' do
- let(:raw_metadata) do
- '{:location=>{"image"=>"index.docker.io/library/nginx:latest", "kubernetes_resource"=>{"namespace"=>"production", "kind"=>"deployment", "name"=>"nginx", "container_name"=>"nginx", "agent_id"=>"2"}, "dependency"=>{"package"=>{"name"=>"libc"}, "version"=>"v1.2.3"}}}'
- end
-
- it 'converts stringified hash to JSON' do
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- metadata = Oj.load(result)
- expect(metadata).to eq(
- {
- 'location' => {
- 'image' => 'index.docker.io/library/nginx:latest',
- 'kubernetes_resource' => {
- 'namespace' => 'production',
- 'kind' => 'deployment',
- 'name' => 'nginx',
- 'container_name' => 'nginx',
- 'agent_id' => '2'
- },
- 'dependency' => {
- 'package' => { 'name' => 'libc' },
- 'version' => 'v1.2.3'
- }
- }
- }
- )
- end
- end
-
- context 'with valid raw_metadata' do
- where(:raw_metadata) do
- [
- '{}',
- '{"location":null}',
- '{"location":{"image":"index.docker.io/library/nginx:latest","kubernetes_resource":{"namespace":"production","kind":"deployment","name":"nginx","container_name":"nginx","agent_id":"2"},"dependency":{"package":{"name":"libc"},"version":"v1.2.3"}}}'
- ]
- end
-
- with_them do
- it 'does not change the raw_metadata' do
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- expect(result).to eq(raw_metadata)
- end
- end
- end
-
- context 'when raw_metadata contains forbidden types' do
- using RSpec::Parameterized::TableSyntax
-
- where(:raw_metadata, :type) do
- 'def foo; "bar"; end' | :def
- '`cat somefile`' | :xstr
- 'exec("cat /etc/passwd")' | :send
- end
-
- with_them do
- it 'does not change the raw_metadata' do
- expect(Gitlab::AppLogger).to receive(:error).with(message: "expected raw_metadata to be a hash", type: type)
-
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- expect(result).to eq(raw_metadata)
- end
- end
- end
-
- context 'when forbidden types are nested inside a hash' do
- using RSpec::Parameterized::TableSyntax
-
- where(:raw_metadata, :type) do
- '{:location=>Env.fetch("SOME_VAR")}' | :send
- '{:location=>{:image=>Env.fetch("SOME_VAR")}}' | :send
- # rubocop:disable Lint/InterpolationCheck
- '{"key"=>"value: #{send}"}' | :dstr
- # rubocop:enable Lint/InterpolationCheck
- end
-
- with_them do
- it 'does not change the raw_metadata' do
- expect(Gitlab::AppLogger).to receive(:error).with(
- message: "error parsing raw_metadata",
- error: "value of a pair was an unexpected type",
- type: type
- )
-
- expect { perform }.not_to raise_error
-
- result = finding.reload.raw_metadata
- expect(result).to eq(raw_metadata)
- end
- end
- end
-
- context 'when key is an unexpected type' do
- let(:raw_metadata) { "{nil=>nil}" }
-
- it 'logs error' do
- expect(Gitlab::AppLogger).to receive(:error).with(
- message: "error parsing raw_metadata",
- error: "expected key to be either symbol, string, or integer",
- type: :nil
- )
-
- expect { perform }.not_to raise_error
- end
- end
-
- context 'when raw_metadata cannot be parsed' do
- let(:raw_metadata) { "{" }
-
- it 'logs error' do
- expect(Gitlab::AppLogger).to receive(:error).with(message: "error parsing raw_metadata", error: "unexpected token $end")
-
- expect { perform }.not_to raise_error
- end
- end
-
- describe '#hash_from_s' do
- subject { described_class.new.hash_from_s(input) }
-
- context 'with valid input' do
- let(:input) { '{:location=>{"image"=>"index.docker.io/library/nginx:latest", "kubernetes_resource"=>{"namespace"=>"production", "kind"=>"deployment", "name"=>"nginx", "container_name"=>"nginx", "agent_id"=>2}, "dependency"=>{"package"=>{"name"=>"libc"}, "version"=>"v1.2.3"}}}' }
-
- it 'converts string to a hash' do
- expect(subject).to eq({
- location: {
- 'image' => 'index.docker.io/library/nginx:latest',
- 'kubernetes_resource' => {
- 'namespace' => 'production',
- 'kind' => 'deployment',
- 'name' => 'nginx',
- 'container_name' => 'nginx',
- 'agent_id' => 2
- },
- 'dependency' => {
- 'package' => { 'name' => 'libc' },
- 'version' => 'v1.2.3'
- }
- }
- })
- end
- end
-
- using RSpec::Parameterized::TableSyntax
-
- where(:input, :expected) do
- '{}' | {}
- '{"bool"=>true}' | { 'bool' => true }
- '{"bool"=>false}' | { 'bool' => false }
- '{"nil"=>nil}' | { 'nil' => nil }
- '{"array"=>[1, "foo", nil]}' | { 'array' => [1, "foo", nil] }
- '{foo: :bar}' | { foo: :bar }
- '{foo: {bar: "bin"}}' | { foo: { bar: "bin" } }
- end
-
- with_them do
- specify { expect(subject).to eq(expected) }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb b/spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb
new file mode 100644
index 00000000000..05a19b7973c
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/mark_duplicate_npm_packages_for_destruction_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MarkDuplicateNpmPackagesForDestruction, schema: 20230524201454, feature_category: :package_registry do # rubocop:disable Layout/LineLength
+ describe '#perform' do
+ let(:projects_table) { table(:projects) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:packages_table) { table(:packages_packages) }
+
+ let!(:namespace) do
+ namespaces_table.create!(name: 'project', path: 'project', type: 'Project')
+ end
+
+ let!(:project) do
+ projects_table.create!(
+ namespace_id: namespace.id,
+ name: 'project',
+ path: 'project',
+ project_namespace_id: namespace.id
+ )
+ end
+
+ let!(:package_1) do
+ packages_table.create!(
+ project_id: project.id,
+ name: 'test1',
+ version: '1.0.0',
+ package_type: described_class::NPM_PACKAGE_TYPE
+ )
+ end
+
+ let!(:package_2) do
+ packages_table.create!(
+ project_id: project.id,
+ name: 'test2',
+ version: '1.0.0',
+ package_type: described_class::NPM_PACKAGE_TYPE
+ )
+ end
+
+ let!(:package_3) do
+ packages_table.create!(
+ project_id: project.id,
+ name: 'test3',
+ version: '1.0.0',
+ package_type: described_class::NPM_PACKAGE_TYPE
+ )
+ end
+
+ let(:migration) do
+ described_class.new(
+ start_id: projects_table.minimum(:id),
+ end_id: projects_table.maximum(:id),
+ batch_table: :packages_packages,
+ batch_column: :project_id,
+ sub_batch_size: 10,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ before do
+ # create a duplicated package without triggering model validation errors
+ package_2.update_column(:name, package_1.name)
+ package_3.update_column(:name, package_1.name)
+ end
+
+ it 'marks duplicate npm packages for destruction', :aggregate_failures do
+ packages_marked_for_destruction = described_class::Package
+ .where(status: described_class::PENDING_DESTRUCTION_STATUS)
+
+ expect { migration.perform }
+ .to change { packages_marked_for_destruction.count }.from(0).to(2)
+ expect(package_3.reload.status).not_to eq(described_class::PENDING_DESTRUCTION_STATUS)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb b/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb
deleted file mode 100644
index 2c2c048992f..00000000000
--- a/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb
+++ /dev/null
@@ -1,148 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MergeTopicsWithSameName, schema: 20220331133802 do
- def set_avatar(topic_id, avatar)
- topic = ::Projects::Topic.find(topic_id)
- topic.avatar = avatar
- topic.save!
- topic.avatar.absolute_path
- end
-
- it 'merges project topics with same case insensitive name' do
- namespaces = table(:namespaces)
- projects = table(:projects)
- topics = table(:topics)
- project_topics = table(:project_topics)
-
- group_1 = namespaces.create!(name: 'space1', type: 'Group', path: 'space1')
- group_2 = namespaces.create!(name: 'space2', type: 'Group', path: 'space2')
- group_3 = namespaces.create!(name: 'space3', type: 'Group', path: 'space3')
- proj_space_1 = namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: group_1.id)
- proj_space_2 = namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: group_2.id)
- proj_space_3 = namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: group_3.id)
- project_1 = projects.create!(namespace_id: group_1.id, project_namespace_id: proj_space_1.id, visibility_level: 20)
- project_2 = projects.create!(namespace_id: group_2.id, project_namespace_id: proj_space_2.id, visibility_level: 10)
- project_3 = projects.create!(namespace_id: group_3.id, project_namespace_id: proj_space_3.id, visibility_level: 0)
- topic_1_keep = topics.create!(
- name: 'topic1',
- title: 'Topic 1',
- description: 'description 1 to keep',
- total_projects_count: 2,
- non_private_projects_count: 2
- )
- topic_1_remove = topics.create!(
- name: 'TOPIC1',
- title: 'Topic 1',
- description: 'description 1 to remove',
- total_projects_count: 2,
- non_private_projects_count: 1
- )
- topic_2_remove = topics.create!(
- name: 'topic2',
- title: 'Topic 2',
- total_projects_count: 0
- )
- topic_2_keep = topics.create!(
- name: 'TOPIC2',
- title: 'Topic 2',
- description: 'description 2 to keep',
- total_projects_count: 1
- )
- topic_3_remove_1 = topics.create!(
- name: 'topic3',
- title: 'Topic 3',
- total_projects_count: 2,
- non_private_projects_count: 1
- )
- topic_3_keep = topics.create!(
- name: 'Topic3',
- title: 'Topic 3',
- total_projects_count: 2,
- non_private_projects_count: 2
- )
- topic_3_remove_2 = topics.create!(
- name: 'TOPIC3',
- title: 'Topic 3',
- description: 'description 3 to keep',
- total_projects_count: 2,
- non_private_projects_count: 1
- )
- topic_4_keep = topics.create!(
- name: 'topic4',
- title: 'Topic 4'
- )
-
- project_topics_1 = []
- project_topics_3 = []
- project_topics_removed = []
-
- project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_1.id)
- project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_2.id)
- project_topics_removed << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_2.id)
- project_topics_1 << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_3.id)
-
- project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_1.id)
- project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_2.id)
- project_topics_removed << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_1.id)
- project_topics_3 << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_3.id)
- project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_1.id)
- project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_3.id)
-
- avatar_paths = {
- topic_1_keep: set_avatar(topic_1_keep.id, fixture_file_upload('spec/fixtures/avatars/avatar1.png')),
- topic_1_remove: set_avatar(topic_1_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar2.png')),
- topic_2_remove: set_avatar(topic_2_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar3.png')),
- topic_3_remove_1: set_avatar(topic_3_remove_1.id, fixture_file_upload('spec/fixtures/avatars/avatar4.png')),
- topic_3_remove_2: set_avatar(topic_3_remove_2.id, fixture_file_upload('spec/fixtures/avatars/avatar5.png'))
- }
-
- subject.perform(%w[topic1 topic2 topic3 topic4])
-
- # Topics
- [topic_1_keep, topic_2_keep, topic_3_keep, topic_4_keep].each(&:reload)
- expect(topic_1_keep.name).to eq('topic1')
- expect(topic_1_keep.description).to eq('description 1 to keep')
- expect(topic_1_keep.total_projects_count).to eq(3)
- expect(topic_1_keep.non_private_projects_count).to eq(2)
- expect(topic_2_keep.name).to eq('TOPIC2')
- expect(topic_2_keep.description).to eq('description 2 to keep')
- expect(topic_2_keep.total_projects_count).to eq(0)
- expect(topic_2_keep.non_private_projects_count).to eq(0)
- expect(topic_3_keep.name).to eq('Topic3')
- expect(topic_3_keep.description).to eq('description 3 to keep')
- expect(topic_3_keep.total_projects_count).to eq(3)
- expect(topic_3_keep.non_private_projects_count).to eq(2)
- expect(topic_4_keep.reload.name).to eq('topic4')
-
- [topic_1_remove, topic_2_remove, topic_3_remove_1, topic_3_remove_2].each do |topic|
- expect { topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
-
- # Topic avatars
- expect(topic_1_keep.avatar).to eq('avatar1.png')
- expect(File.exist?(::Projects::Topic.find(topic_1_keep.id).avatar.absolute_path)).to be_truthy
- expect(topic_2_keep.avatar).to eq('avatar3.png')
- expect(File.exist?(::Projects::Topic.find(topic_2_keep.id).avatar.absolute_path)).to be_truthy
- expect(topic_3_keep.avatar).to eq('avatar4.png')
- expect(File.exist?(::Projects::Topic.find(topic_3_keep.id).avatar.absolute_path)).to be_truthy
-
- [:topic_1_remove, :topic_2_remove, :topic_3_remove_1, :topic_3_remove_2].each do |topic|
- expect(File.exist?(avatar_paths[topic])).to be_falsey
- end
-
- # Project Topic assignments
- project_topics_1.each do |project_topic|
- expect(project_topic.reload.topic_id).to eq(topic_1_keep.id)
- end
-
- project_topics_3.each do |project_topic|
- expect(project_topic.reload.topic_id).to eq(topic_3_keep.id)
- end
-
- project_topics_removed.each do |project_topic|
- expect { project_topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
deleted file mode 100644
index 07e77bdbc13..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigratePersonalNamespaceProjectMaintainerToOwner, :migration, schema: 20220208080921 do
- let(:migration) { described_class.new }
- let(:users_table) { table(:users) }
- let(:members_table) { table(:members) }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
-
- let(:table_name) { 'members' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 10 }
- let(:pause_ms) { 0 }
-
- let(:owner_access) { 50 }
- let(:maintainer_access) { 40 }
- let(:developer_access) { 30 }
-
- subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) }
-
- before do
- users_table.create!(id: 101, name: "user1", email: "user1@example.com", projects_limit: 5)
- users_table.create!(id: 102, name: "user2", email: "user2@example.com", projects_limit: 5)
-
- namespaces_table.create!(id: 201, name: 'user1s-namespace', path: 'user1s-namespace-path', type: 'User', owner_id: 101)
- namespaces_table.create!(id: 202, name: 'user2s-namespace', path: 'user2s-namespace-path', type: 'User', owner_id: 102)
- namespaces_table.create!(id: 203, name: 'group', path: 'group', type: 'Group')
- namespaces_table.create!(id: 204, name: 'project-namespace', path: 'project-namespace-path', type: 'Project')
-
- projects_table.create!(id: 301, name: 'user1-namespace-project', path: 'project-path-1', namespace_id: 201)
- projects_table.create!(id: 302, name: 'user2-namespace-project', path: 'project-path-2', namespace_id: 202)
- projects_table.create!(id: 303, name: 'user2s-namespace-project2', path: 'project-path-3', namespace_id: 202)
- projects_table.create!(id: 304, name: 'group-project3', path: 'group-project-path-3', namespace_id: 203)
-
- # user1 member of their own namespace project, maintainer access (change)
- create_project_member(id: 1, user_id: 101, project_id: 301, level: maintainer_access)
-
- # user2 member of their own namespace project, owner access (no change)
- create_project_member(id: 2, user_id: 102, project_id: 302, level: owner_access)
-
- # user1 member of user2's personal namespace project, maintainer access (no change)
- create_project_member(id: 3, user_id: 101, project_id: 302, level: maintainer_access)
-
- # user1 member of group project, maintainer access (no change)
- create_project_member(id: 4, user_id: 101, project_id: 304, level: maintainer_access)
-
- # user1 member of group, Maintainer role (no change)
- create_group_member(id: 5, user_id: 101, group_id: 203, level: maintainer_access)
-
- # user2 member of their own namespace project, maintainer access, but out of batch range (no change)
- create_project_member(id: 601, user_id: 102, project_id: 303, level: maintainer_access)
- end
-
- it 'migrates MAINTAINER membership records for personal namespaces to OWNER', :aggregate_failures do
- expect(members_table.where(access_level: owner_access).count).to eq 1
- expect(members_table.where(access_level: maintainer_access).count).to eq 5
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(3)
- expect(members_table.where(access_level: owner_access).pluck(:id)).to match_array([1, 2])
- expect(members_table.where(access_level: maintainer_access).pluck(:id)).to match_array([3, 4, 5, 601])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- def create_group_member(id:, user_id:, group_id:, level:)
- members_table.create!(id: id, user_id: user_id, source_id: group_id, access_level: level, source_type: "Namespace", type: "GroupMember", notification_level: 3)
- end
-
- def create_project_member(id:, user_id:, project_id:, level:)
- members_table.create!(id: id, user_id: user_id, source_id: project_id, access_level: level, source_type: "Namespace", type: "ProjectMember", notification_level: 3)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb b/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb
deleted file mode 100644
index 8bc6bb8ae0a..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigrateShimoConfluenceIntegrationCategory, schema: 20220326161803 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:integrations) { table(:integrations) }
- let(:perform) { described_class.new.perform(1, 5) }
-
- before do
- namespace = namespaces.create!(name: 'test', path: 'test')
- projects.create!(id: 1, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab')
- integrations.create!(id: 1, active: true, type_new: "Integrations::SlackSlashCommands",
- category: 'chat', project_id: 1)
- integrations.create!(id: 3, active: true, type_new: "Integrations::Confluence", category: 'common', project_id: 1)
- integrations.create!(id: 5, active: true, type_new: "Integrations::Shimo", category: 'common', project_id: 1)
- end
-
- describe '#up' do
- it 'updates category to third_party_wiki for Shimo and Confluence' do
- perform
-
- expect(integrations.where(category: 'third_party_wiki').count).to eq(2)
- expect(integrations.where(category: 'chat').count).to eq(1)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb b/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb
index f671a673a08..facc3c435da 100644
--- a/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::NullifyCreatorIdColumnOfOrphanedProjects, feature_category: :projects,
- schema: 20230130073109 do
+RSpec.describe Gitlab::BackgroundMigration::NullifyCreatorIdColumnOfOrphanedProjects,
+ feature_category: :groups_and_projects, schema: 20230130073109 do
let(:users) { table(:users) }
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
diff --git a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
deleted file mode 100644
index 5b234679e22..00000000000
--- a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds,
- :suppress_gitlab_schemas_validate_connection, migration: :gitlab_ci, schema: 20220223112304 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:ci_runners) { table(:ci_runners) }
- let(:ci_pipelines) { table(:ci_pipelines) }
- let(:ci_builds) { table(:ci_builds) }
-
- subject { described_class.new }
-
- let(:helpers) do
- ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers)
- end
-
- before do
- helpers.remove_foreign_key_if_exists(:ci_builds, column: :runner_id)
- end
-
- after do
- helpers.add_concurrent_foreign_key(
- :ci_builds, :ci_runners, column: :runner_id, on_delete: :nullify, validate: false
- )
- end
-
- describe '#perform' do
- let(:namespace) { namespaces.create!(name: 'test', path: 'test', type: 'Group') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'test') }
-
- it 'nullifies runner_id for orphan ci_builds in range' do
- pipeline = ci_pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success')
- ci_runners.create!(id: 2, runner_type: 'project_type')
-
- ci_builds.create!(id: 5, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 2)
- ci_builds.create!(id: 7, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 4)
- ci_builds.create!(id: 8, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 5)
- ci_builds.create!(id: 9, type: 'Ci::Build', commit_id: pipeline.id, runner_id: 6)
-
- subject.perform(4, 8, :ci_builds, :id, 10, 0)
-
- expect(ci_builds.all).to contain_exactly(
- an_object_having_attributes(id: 5, runner_id: 2),
- an_object_having_attributes(id: 7, runner_id: nil),
- an_object_having_attributes(id: 8, runner_id: nil),
- an_object_having_attributes(id: 9, runner_id: 6)
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb b/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb
deleted file mode 100644
index 477167c9074..00000000000
--- a/spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateContainerRepositoryMigrationPlan, schema: 20220316202640 do
- let!(:container_repositories) { table(:container_repositories) }
- let!(:projects) { table(:projects) }
- let!(:namespaces) { table(:namespaces) }
- let!(:gitlab_subscriptions) { table(:gitlab_subscriptions) }
- let!(:plans) { table(:plans) }
- let!(:namespace_statistics) { table(:namespace_statistics) }
-
- let!(:namepace1) { namespaces.create!(id: 1, type: 'Group', name: 'group1', path: 'group1', traversal_ids: [1]) }
- let!(:namepace2) { namespaces.create!(id: 2, type: 'Group', name: 'group2', path: 'group2', traversal_ids: [2]) }
- let!(:namepace3) { namespaces.create!(id: 3, type: 'Group', name: 'group3', path: 'group3', traversal_ids: [3]) }
- let!(:sub_namespace) { namespaces.create!(id: 4, type: 'Group', name: 'group3', path: 'group3', parent_id: 1, traversal_ids: [1, 4]) }
- let!(:plan1) { plans.create!(id: 1, name: 'plan1') }
- let!(:plan2) { plans.create!(id: 2, name: 'plan2') }
- let!(:gitlab_subscription1) { gitlab_subscriptions.create!(id: 1, namespace_id: 1, hosted_plan_id: 1) }
- let!(:gitlab_subscription2) { gitlab_subscriptions.create!(id: 2, namespace_id: 2, hosted_plan_id: 2) }
- let!(:project1) { projects.create!(id: 1, name: 'project1', path: 'project1', namespace_id: 4) }
- let!(:project2) { projects.create!(id: 2, name: 'project2', path: 'project2', namespace_id: 2) }
- let!(:project3) { projects.create!(id: 3, name: 'project3', path: 'project3', namespace_id: 3) }
- let!(:container_repository1) { container_repositories.create!(id: 1, name: 'cr1', project_id: 1) }
- let!(:container_repository2) { container_repositories.create!(id: 2, name: 'cr2', project_id: 2) }
- let!(:container_repository3) { container_repositories.create!(id: 3, name: 'cr3', project_id: 3) }
-
- let(:migration) { described_class.new }
-
- subject do
- migration.perform(1, 4)
- end
-
- it 'updates the migration_plan to match the actual plan', :aggregate_failures do
- expect(Gitlab::Database::BackgroundMigrationJob).to receive(:mark_all_as_succeeded)
- .with('PopulateContainerRepositoryMigrationPlan', [1, 4]).and_return(true)
-
- subject
-
- expect(container_repository1.reload.migration_plan).to eq('plan1')
- expect(container_repository2.reload.migration_plan).to eq('plan2')
- expect(container_repository3.reload.migration_plan).to eq(nil)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb
deleted file mode 100644
index 4a7d52ee784..00000000000
--- a/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateNamespaceStatistics do
- let!(:namespaces) { table(:namespaces) }
- let!(:namespace_statistics) { table(:namespace_statistics) }
- let!(:dependency_proxy_manifests) { table(:dependency_proxy_manifests) }
- let!(:dependency_proxy_blobs) { table(:dependency_proxy_blobs) }
-
- let!(:group1) { namespaces.create!(id: 10, type: 'Group', name: 'group1', path: 'group1') }
- let!(:group2) { namespaces.create!(id: 20, type: 'Group', name: 'group2', path: 'group2') }
-
- let!(:group1_manifest) do
- dependency_proxy_manifests.create!(group_id: 10, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123')
- end
-
- let!(:group2_manifest) do
- dependency_proxy_manifests.create!(group_id: 20, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123')
- end
-
- let!(:group1_stats) { namespace_statistics.create!(id: 10, namespace_id: 10) }
-
- let(:ids) { namespaces.pluck(:id) }
- let(:statistics) { [] }
-
- subject(:perform) { described_class.new.perform(ids, statistics) }
-
- it 'creates/updates all namespace_statistics and updates root storage statistics', :aggregate_failures do
- expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group1.id)
- expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group2.id)
-
- expect { perform }.to change(namespace_statistics, :count).from(1).to(2)
-
- namespace_statistics.all.each do |stat|
- expect(stat.dependency_proxy_size).to eq 20
- expect(stat.storage_size).to eq 20
- end
- end
-
- context 'when just a stat is passed' do
- let(:statistics) { [:dependency_proxy_size] }
-
- it 'calls the statistics update service with just that stat' do
- expect(Groups::UpdateStatisticsService)
- .to receive(:new)
- .with(anything, statistics: [:dependency_proxy_size])
- .twice.and_call_original
-
- perform
- end
- end
-
- context 'when a statistics update fails' do
- before do
- error_response = instance_double(ServiceResponse, message: 'an error', error?: true)
-
- allow_next_instance_of(Groups::UpdateStatisticsService) do |instance|
- allow(instance).to receive(:execute).and_return(error_response)
- end
- end
-
- it 'logs an error' do
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:error).twice
- end
-
- perform
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb b/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb
deleted file mode 100644
index e72e3392210..00000000000
--- a/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateTopicsNonPrivateProjectsCount, schema: 20220125122640 do
- it 'correctly populates the non private projects counters' do
- namespaces = table(:namespaces)
- projects = table(:projects)
- topics = table(:topics)
- project_topics = table(:project_topics)
-
- group = namespaces.create!(name: 'group', path: 'group')
- project_public = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- project_internal = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
- project_private = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- topic_1 = topics.create!(name: 'Topic1')
- topic_2 = topics.create!(name: 'Topic2')
- topic_3 = topics.create!(name: 'Topic3')
- topic_4 = topics.create!(name: 'Topic4')
- topic_5 = topics.create!(name: 'Topic5')
- topic_6 = topics.create!(name: 'Topic6')
- topic_7 = topics.create!(name: 'Topic7')
- topic_8 = topics.create!(name: 'Topic8')
-
- project_topics.create!(topic_id: topic_1.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_2.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_3.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_4.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_4.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_5.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_5.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_6.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_6.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_7.id, project_id: project_public.id)
- project_topics.create!(topic_id: topic_7.id, project_id: project_internal.id)
- project_topics.create!(topic_id: topic_7.id, project_id: project_private.id)
- project_topics.create!(topic_id: topic_8.id, project_id: project_public.id)
-
- subject.perform(topic_1.id, topic_7.id)
-
- expect(topic_1.reload.non_private_projects_count).to eq(1)
- expect(topic_2.reload.non_private_projects_count).to eq(1)
- expect(topic_3.reload.non_private_projects_count).to eq(0)
- expect(topic_4.reload.non_private_projects_count).to eq(2)
- expect(topic_5.reload.non_private_projects_count).to eq(1)
- expect(topic_6.reload.non_private_projects_count).to eq(1)
- expect(topic_7.reload.non_private_projects_count).to eq(2)
- expect(topic_8.reload.non_private_projects_count).to eq(0)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb
deleted file mode 100644
index c0470f26d9e..00000000000
--- a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityReads, :migration, schema: 20220326161803 do
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerability_reads) { table(:vulnerability_reads) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
- let(:vulnerability_issue_links) { table(:vulnerability_issue_links) }
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let(:sub_batch_size) { 1000 }
-
- before do
- vulnerabilities_findings.connection.execute 'ALTER TABLE vulnerability_occurrences DISABLE TRIGGER "trigger_insert_or_update_vulnerability_reads_from_occurrences"'
- vulnerabilities.connection.execute 'ALTER TABLE vulnerabilities DISABLE TRIGGER "trigger_update_vulnerability_reads_on_vulnerability_update"'
- vulnerability_issue_links.connection.execute 'ALTER TABLE vulnerability_issue_links DISABLE TRIGGER "trigger_update_has_issues_on_vulnerability_issue_links_update"'
-
- 10.times.each do |x|
- vulnerability = create_vulnerability!(
- project_id: project.id,
- report_type: 7,
- author_id: user.id
- )
- identifier = table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: Digest::SHA1.hexdigest(vulnerability.id.to_s),
- name: 'Identifier for UUIDv5')
-
- create_finding!(
- vulnerability_id: vulnerability.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id
- )
- end
- end
-
- it 'creates vulnerability_reads for the given records' do
- described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size)
-
- expect(vulnerability_reads.count).to eq(10)
- end
-
- it 'does not create new records when records already exists' do
- described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size)
- described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size)
-
- expect(vulnerability_reads.count).to eq(10)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location: location,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-end
diff --git a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
deleted file mode 100644
index bff803e2035..00000000000
--- a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
+++ /dev/null
@@ -1,266 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration, schema: 20220326161803 do
- include MigrationsHelpers
-
- RSpec.shared_examples 'backfills project namespaces' do
- context 'when migrating data', :aggregate_failures do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
-
- let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
- let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
-
- let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
- let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
-
- let(:child_nodes_count) { 2 }
- let(:tree_depth) { 3 }
-
- let(:backfilled_namespace) { nil }
-
- before do
- BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
- end
-
- describe '#up' do
- shared_examples 'back-fill project namespaces' do
- it 'back-fills all project namespaces' do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- projects_count = ::Project.count
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_count = ::Namespace.where(type: 'Project').count
- migration = described_class.new
-
- expect(projects_count).not_to eq(project_namespaces_count)
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
-
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
- check_projects_in_sync_with(Namespace.where(type: 'Project'))
- end
-
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
-
- it 'back-fills project namespaces for the specified group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
-
- migration = described_class.new
-
- expect(project_namespaces_in_hierarchy.count).to eq(0)
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect(group_projects_count).to eq(14)
- expect(project_namespaces_in_hierarchy.count).to eq(0)
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
-
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- check_projects_in_sync_with(project_namespaces_in_hierarchy)
- end
- end
-
- context 'when projects already have project namespaces' do
- before do
- hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
- start_id = hierarchy1_projects.minimum(:id)
- end_id = hierarchy1_projects.maximum(:id)
-
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
- end
-
- it 'does not duplicate project namespaces' do
- # check there are already some project namespaces but not for all
- projects_count = ::Project.count
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces = ::Namespace.where(type: 'Project')
- migration = described_class.new
-
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
- expect(projects_count).not_to eq(project_namespaces.count)
-
- # run migration again to test we do not generate extra project namespaces
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
-
- expect(projects_count).to eq(project_namespaces.count)
- end
- end
- end
-
- it 'checks no project namespaces exist in the defined hierarchies' do
- hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
- hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
- hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
- hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
-
- expect(hierarchy1_project_namespaces).to be_empty
- expect(hierarchy2_project_namespaces).to be_empty
- expect(hierarchy1_projects_count).to eq(14)
- expect(hierarchy2_projects_count).to eq(14)
- end
-
- context 'back-fill project namespaces in a single batch' do
- it_behaves_like 'back-fill project namespaces'
- end
-
- context 'back-fill project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
- end
-
- it_behaves_like 'back-fill project namespaces'
- end
- end
-
- describe '#down' do
- before do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- # back-fill first
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
- end
-
- shared_examples 'cleanup project namespaces' do
- it 'removes project namespaces' do
- projects_count = ::Project.count
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- migration = described_class.new
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
-
- expect(projects_count).to be > 0
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
-
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
-
- expect(::Project.count).to be > 0
- expect(::Namespace.where(type: 'Project').count).to eq(0)
- end
-
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
-
- it 'removes project namespaces only for the specific group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
- migration = described_class.new
-
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
-
- expect(::Namespace.where(type: 'Project').count).to be > 0
- expect(project_namespaces_in_hierarchy.count).to eq(0)
- end
- end
- end
-
- context 'cleanup project namespaces in a single batch' do
- it_behaves_like 'cleanup project namespaces'
- end
-
- context 'cleanup project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
- end
-
- it_behaves_like 'cleanup project namespaces'
- end
- end
- end
- end
-
- it_behaves_like 'backfills project namespaces'
-
- context 'when namespaces.id is bigint' do
- before do
- namespaces.connection.execute("ALTER TABLE namespaces ALTER COLUMN id TYPE bigint")
- end
-
- it_behaves_like 'backfills project namespaces'
- end
-
- def base_ancestor(ancestor)
- ::Namespace.where(id: ancestor.id)
- end
-
- def project_namespaces_in_hierarchy(base_node)
- Gitlab::ObjectHierarchy.new(base_node).base_and_descendants.where(type: 'Project')
- end
-
- def check_projects_in_sync_with(namespaces)
- project_namespaces_attrs = namespaces.order(:id).pluck(:id, :name, :path, :parent_id, :visibility_level, :shared_runners_enabled)
- corresponding_projects_attrs = Project.where(project_namespace_id: project_namespaces_attrs.map(&:first))
- .order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
-
- expect(project_namespaces_attrs).to eq(corresponding_projects_attrs)
- end
-end
-
-module BackfillProjectNamespaces
- class TreeGenerator
- def initialize(namespaces, projects, parent_nodes, child_nodes_count, tree_depth)
- parent_nodes_ids = parent_nodes.map(&:id)
-
- @namespaces = namespaces
- @projects = projects
- @subgroups_depth = tree_depth
- @resource_count = child_nodes_count
- @all_groups = [parent_nodes_ids]
- end
-
- def build_tree
- (1..@subgroups_depth).each do |level|
- parent_level = level - 1
- current_level = level
- parent_groups = @all_groups[parent_level]
-
- parent_groups.each do |parent_id|
- @resource_count.times do |i|
- group_path = "child#{i}_level#{level}"
- project_path = "project#{i}_level#{level}"
- sub_group = @namespaces.create!(name: group_path, path: group_path, parent_id: parent_id, visibility_level: 20, type: 'Group')
- @projects.create!(name: project_path, path: project_path, namespace_id: sub_group.id, visibility_level: 20)
-
- track_group_id(current_level, sub_group.id)
- end
- end
- end
- end
-
- def track_group_id(depth_level, group_id)
- @all_groups[depth_level] ||= []
- @all_groups[depth_level] << group_id
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
deleted file mode 100644
index eabc012f98b..00000000000
--- a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveAllTraceExpirationDates, :migration,
- :suppress_gitlab_schemas_validate_connection, schema: 20220131000001 do
- subject(:perform) { migration.perform(1, 99) }
-
- let(:migration) { described_class.new }
-
- let(:trace_in_range) { create_trace!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
- let(:trace_outside_range) { create_trace!(id: 40, created_at: Date.new(2020, 06, 22), expire_at: Date.new(2021, 01, 22)) }
- let(:trace_without_expiry) { create_trace!(id: 30, created_at: Date.new(2020, 06, 21), expire_at: nil) }
- let(:archive_in_range) { create_archive!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
- let(:trace_outside_id_range) { create_trace!(id: 100, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) }
-
- before do
- table(:namespaces).create!(id: 1, name: 'the-namespace', path: 'the-path')
- table(:projects).create!(id: 1, name: 'the-project', namespace_id: 1)
- table(:ci_builds).create!(id: 1, allow_failure: false)
- end
-
- context 'for self-hosted instances' do
- it 'sets expire_at for artifacts in range to nil' do
- expect { perform }.not_to change { trace_in_range.reload.expire_at }
- end
-
- it 'does not change expire_at timestamps that are not set to midnight' do
- expect { perform }.not_to change { trace_outside_range.reload.expire_at }
- end
-
- it 'does not change expire_at timestamps that are set to midnight on a day other than the 22nd' do
- expect { perform }.not_to change { trace_without_expiry.reload.expire_at }
- end
-
- it 'does not touch artifacts outside id range' do
- expect { perform }.not_to change { archive_in_range.reload.expire_at }
- end
-
- it 'does not touch artifacts outside date range' do
- expect { perform }.not_to change { trace_outside_id_range.reload.expire_at }
- end
- end
-
- private
-
- def create_trace!(**args)
- table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 3)
- end
-
- def create_archive!(**args)
- table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 1)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb b/spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb
new file mode 100644
index 00000000000..0cdfe7bb945
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_invalid_deploy_access_level_groups_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveInvalidDeployAccessLevelGroups,
+ :migration, schema: 20230519011151, feature_category: :continuous_delivery do
+ let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let!(:project) { table(:projects).create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let!(:group) { table(:namespaces).create!(name: 'group', path: 'group', type: 'Group') }
+ let!(:user) { table(:users).create!(email: 'deployer@example.com', username: 'deployer', projects_limit: 0) }
+ let!(:protected_environment) { table(:protected_environments).create!(project_id: project.id, name: 'production') }
+
+ let(:migration) do
+ described_class.new(
+ start_id: 1, end_id: 1000,
+ batch_table: :protected_environment_deploy_access_levels, batch_column: :id,
+ sub_batch_size: 10, pause_ms: 0,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ describe '#perform' do
+ let!(:deploy_access_level_access_level) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, access_level: 40)
+ end
+
+ let!(:deploy_access_level_user) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, user_id: user.id)
+ end
+
+ let!(:deploy_access_level_group) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, group_id: group.id)
+ end
+
+ let!(:deploy_access_level_namespace) do
+ table(:protected_environment_deploy_access_levels)
+ .create!(protected_environment_id: protected_environment.id, group_id: namespace.id)
+ end
+
+ it 'backfill tiers for all environments in range' do
+ expect(deploy_access_level_access_level).to be_present
+ expect(deploy_access_level_user).to be_present
+ expect(deploy_access_level_group).to be_present
+ expect(deploy_access_level_namespace).to be_present
+
+ migration.perform
+
+ expect { deploy_access_level_access_level.reload }.not_to raise_error
+ expect { deploy_access_level_user.reload }.not_to raise_error
+ expect { deploy_access_level_group.reload }.not_to raise_error
+ expect { deploy_access_level_namespace.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
deleted file mode 100644
index 60ee61cf50a..00000000000
--- a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
+++ /dev/null
@@ -1,174 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration,
- :suppress_gitlab_schemas_validate_connection, schema: 20220326161803 do
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:users) { table(:users) }
- let(:user) { create_user! }
- let(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) }
- let(:scanners) { table(:vulnerability_scanners) }
- let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
- let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') }
- let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerability_findings) { table(:vulnerability_occurrences) }
- let(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) }
- let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
- let(:vulnerability_identifier) do
- vulnerability_identifiers.create!(
- id: 1244459,
- project_id: project.id,
- external_type: 'vulnerability-identifier',
- external_id: 'vulnerability-identifier',
- fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45',
- name: 'vulnerability identifier')
- end
-
- let!(:vulnerability_for_first_duplicate) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:first_finding_duplicate) do
- create_finding!(
- id: 5606961,
- uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e",
- vulnerability_id: vulnerability_for_first_duplicate.id,
- report_type: 0,
- location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: scanner1.id,
- project_id: project.id
- )
- end
-
- let!(:vulnerability_for_second_duplicate) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:second_finding_duplicate) do
- create_finding!(
- id: 8765432,
- uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5",
- vulnerability_id: vulnerability_for_second_duplicate.id,
- report_type: 0,
- location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: scanner2.id,
- project_id: project.id
- )
- end
-
- let!(:vulnerability_for_third_duplicate) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:third_finding_duplicate) do
- create_finding!(
- id: 8832995,
- uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4",
- vulnerability_id: vulnerability_for_third_duplicate.id,
- report_type: 0,
- location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: scanner3.id,
- project_id: project.id
- )
- end
-
- let!(:unrelated_finding) do
- create_finding!(
- id: 9999999,
- vulnerability_id: nil,
- report_type: 1,
- location_fingerprint: 'random_location_fingerprint',
- primary_identifier_id: vulnerability_identifier.id,
- scanner_id: unrelated_scanner.id,
- project_id: project.id
- )
- end
-
- subject { described_class.new.perform(first_finding_duplicate.id, unrelated_finding.id) }
-
- before do
- 4.times do
- create_finding_pipeline!(project_id: project.id, finding_id: first_finding_duplicate.id)
- create_finding_pipeline!(project_id: project.id, finding_id: second_finding_duplicate.id)
- create_finding_pipeline!(project_id: project.id, finding_id: third_finding_duplicate.id)
- create_finding_pipeline!(project_id: project.id, finding_id: unrelated_finding.id)
- end
- end
-
- it 'removes Vulnerabilities::OccurrencePipelines for matching Vulnerabilities::Finding' do
- expect(vulnerability_findings.count).to eq(4)
- expect(vulnerability_finding_pipelines.count).to eq(16)
-
- expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8)
- .and change(vulnerability_findings, :count).from(4).to(2)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- params = {
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: vulnerability_identifier.id,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- }
- params[:id] = id unless id.nil?
- vulnerability_findings.create!(params)
- end
- # rubocop:enable Metrics/ParameterLists
-
- def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now)
- table(:users).create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- user_type: user_type,
- confirmed_at: confirmed_at
- )
- end
-
- def create_finding_pipeline!(project_id:, finding_id:)
- pipeline = table(:ci_pipelines).create!(project_id: project_id)
- vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb b/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb
index c45c402ab9d..126e928fa77 100644
--- a/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::RemoveProjectGroupLinkWithMissingGroups, :migration,
- feature_category: :subgroups, schema: 20230206172702 do
+ feature_category: :groups_and_projects, schema: 20230206172702 do
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
let(:project_group_links) { table(:project_group_links) }
diff --git a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
deleted file mode 100644
index 32134b99e37..00000000000
--- a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveVulnerabilityFindingLinks, :migration, schema: 20211202041233 do
- let(:vulnerability_findings) { table(:vulnerability_occurrences) }
- let(:finding_links) { table(:vulnerability_finding_links) }
-
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'scanner', name: 'scanner') }
- let(:vulnerability_identifier) do
- table(:vulnerability_identifiers).create!(
- project_id: project.id,
- external_type: 'vulnerability-identifier',
- external_id: 'vulnerability-identifier',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'vulnerability identifier')
- end
-
- # vulnerability findings
- let!(:findings) do
- Array.new(2) do |id|
- vulnerability_findings.create!(
- project_id: project.id,
- name: 'Vulnerability Name',
- severity: 7,
- confidence: 7,
- report_type: 0,
- project_fingerprint: '123qweasdzxc',
- scanner_id: scanner.id,
- primary_identifier_id: vulnerability_identifier.id,
- location_fingerprint: "location_fingerprint_#{id}",
- metadata_version: 'metadata_version',
- raw_metadata: 'raw_metadata',
- uuid: SecureRandom.uuid
- )
- end
- end
-
- # vulnerability finding links
- let!(:links) do
- {
- findings.first => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.first.id, name: "Link Name 1", url: "link_url1_#{id}.example") },
- findings.second => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.second.id, name: "Link Name 2", url: "link_url2_#{id}.example") }
- }
- end
-
- it 'removes vulnerability links' do
- expect do
- subject.perform(links[findings.first].first.id, links[findings.second].last.id)
- end.to change { finding_links.count }.from(10).to(0)
-
- expect(finding_links.all).to be_empty
- end
-
- it 'only deletes vulnerability links for the current batch' do
- expected_links = [finding_links.where(vulnerability_occurrence_id: findings.second.id)].flatten
-
- expect do
- subject.perform(links[findings.first].first.id, links[findings.first].last.id)
- end.to change { finding_links.count }.from(10).to(5)
-
- expect(finding_links.all).to match_array(expected_links)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
deleted file mode 100644
index 71020746fa7..00000000000
--- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_on_projects_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValuesOnProjects, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- subject(:background_migration) { described_class.new }
-
- before do
- namespaces.create!(id: 123, name: 'sample', path: 'sample')
-
- projects.create!(id: 1, namespace_id: 123, runners_token_encrypted: 'duplicate')
- projects.create!(id: 2, namespace_id: 123, runners_token_encrypted: 'a-runners-token')
- projects.create!(id: 3, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
- projects.create!(id: 4, namespace_id: 123, runners_token_encrypted: nil)
- projects.create!(id: 5, namespace_id: 123, runners_token_encrypted: 'duplicate-2')
- projects.create!(id: 6, namespace_id: 123, runners_token_encrypted: 'duplicate')
- projects.create!(id: 7, namespace_id: 123, runners_token_encrypted: 'another-runners-token')
- projects.create!(id: 8, namespace_id: 123, runners_token_encrypted: 'another-runners-token')
- end
-
- describe '#up' do
- it 'nullifies duplicate tokens', :aggregate_failures do
- background_migration.perform(1, 2)
- background_migration.perform(3, 4)
-
- expect(projects.count).to eq(8)
- expect(projects.all.pluck(:id, :runners_token_encrypted).to_h).to eq(
- {
- 1 => nil,
- 2 => 'a-runners-token',
- 3 => nil,
- 4 => nil,
- 5 => 'duplicate-2',
- 6 => 'duplicate',
- 7 => 'another-runners-token',
- 8 => 'another-runners-token'
- })
- expect(projects.pluck(:runners_token_encrypted).uniq).to match_array [
- nil, 'a-runners-token', 'duplicate', 'duplicate-2', 'another-runners-token'
- ]
- end
-
- it 'does not touch projects outside id range' do
- expect do
- background_migration.perform(1, 2)
- end.not_to change { projects.where(id: [3..8]).each(&:reload).map(&:updated_at) }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
deleted file mode 100644
index 7d3df69bee2..00000000000
--- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_on_projects_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValuesOnProjects, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- subject(:background_migration) { described_class.new }
-
- before do
- namespaces.create!(id: 123, name: 'sample', path: 'sample')
-
- projects.create!(id: 1, namespace_id: 123, runners_token: 'duplicate')
- projects.create!(id: 2, namespace_id: 123, runners_token: 'a-runners-token')
- projects.create!(id: 3, namespace_id: 123, runners_token: 'duplicate-2')
- projects.create!(id: 4, namespace_id: 123, runners_token: nil)
- projects.create!(id: 5, namespace_id: 123, runners_token: 'duplicate-2')
- projects.create!(id: 6, namespace_id: 123, runners_token: 'duplicate')
- projects.create!(id: 7, namespace_id: 123, runners_token: 'another-runners-token')
- projects.create!(id: 8, namespace_id: 123, runners_token: 'another-runners-token')
- end
-
- describe '#up' do
- it 'nullifies duplicate tokens', :aggregate_failures do
- background_migration.perform(1, 2)
- background_migration.perform(3, 4)
-
- expect(projects.count).to eq(8)
- expect(projects.all.pluck(:id, :runners_token).to_h).to eq(
- {
- 1 => nil,
- 2 => 'a-runners-token',
- 3 => nil,
- 4 => nil,
- 5 => 'duplicate-2',
- 6 => 'duplicate',
- 7 => 'another-runners-token',
- 8 => 'another-runners-token'
- })
- expect(projects.pluck(:runners_token).uniq).to match_array [
- nil, 'a-runners-token', 'duplicate', 'duplicate-2', 'another-runners-token'
- ]
- end
-
- it 'does not touch projects outside id range' do
- expect do
- background_migration.perform(1, 2)
- end.not_to change { projects.where(id: [3..8]).each(&:reload).map(&:updated_at) }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb
deleted file mode 100644
index 908f11aabc3..00000000000
--- a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsNullSpentAt, schema: 20211215090620 do
- let!(:previous_time) { 10.days.ago }
- let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
- let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:issue) { table(:issues).create!(project_id: project.id) }
- let!(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') }
- let!(:timelog1) { create_timelog!(issue_id: issue.id) }
- let!(:timelog2) { create_timelog!(merge_request_id: merge_request.id) }
- let!(:timelog3) { create_timelog!(issue_id: issue.id, spent_at: previous_time) }
- let!(:timelog4) { create_timelog!(merge_request_id: merge_request.id, spent_at: previous_time) }
-
- subject(:background_migration) { described_class.new }
-
- before do
- table(:timelogs).where.not(id: [timelog3.id, timelog4.id]).update_all(spent_at: nil)
- end
-
- describe '#perform' do
- it 'sets correct spent_at' do
- background_migration.perform(timelog1.id, timelog4.id)
-
- expect(timelog1.reload.spent_at).to be_like_time(timelog1.created_at)
- expect(timelog2.reload.spent_at).to be_like_time(timelog2.created_at)
- expect(timelog3.reload.spent_at).to be_like_time(previous_time)
- expect(timelog4.reload.spent_at).to be_like_time(previous_time)
- expect(timelog3.reload.spent_at).not_to be_like_time(timelog3.created_at)
- expect(timelog4.reload.spent_at).not_to be_like_time(timelog4.created_at)
- end
- end
-
- private
-
- def create_timelog!(**args)
- table(:timelogs).create!(**args, time_spent: 1)
- end
-end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 48ceda9e8d8..4c94ecfe745 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integrations do
+RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, feature_category: :importers do
include ImportSpecHelper
before do
@@ -104,11 +104,13 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
- author: 'other',
+ author: pull_request_author,
created_at: Time.now,
updated_at: Time.now)
end
+ let(:pull_request_author) { 'other' }
+
let(:author_line) { "*Created by: someuser*\n\n" }
before do
@@ -168,6 +170,16 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
expect(reply_note.note).to include(author_line)
end
+ context 'when author is blank' do
+ let(:pull_request_author) { nil }
+
+ it 'adds created by anonymous in the description', :aggregate_failures do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ expect(MergeRequest.first.description).to include('Created by: Anonymous')
+ end
+ end
+
context 'when user exists in GitLab' do
let!(:existing_user) { create(:user, username: 'someuser') }
let!(:identity) { create(:identity, provider: 'bitbucket', extern_uid: existing_user.username, user: existing_user) }
@@ -218,6 +230,17 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
end
end
+ context "when target_branch_sha is blank" do
+ let(:target_branch_sha) { nil }
+
+ it 'creates the merge request with no target branch', :aggregate_failures do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request = MergeRequest.first
+ expect(merge_request.target_branch_sha).to eq(nil)
+ end
+ end
+
context 'metrics' do
before do
allow(Gitlab::Metrics).to receive(:counter) { counter }
@@ -235,6 +258,29 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
subject.execute
end
end
+
+ context 'when pull request was already imported' do
+ let(:pull_request_already_imported) do
+ instance_double(
+ BitbucketServer::Representation::PullRequest,
+ iid: 11)
+ end
+
+ let(:cache_key) do
+ format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :pull_requests)
+ end
+
+ before do
+ allow(subject.client).to receive(:pull_requests).and_return([pull_request, pull_request_already_imported])
+ Gitlab::Cache::Import::Caching.set_add(cache_key, pull_request_already_imported.iid)
+ end
+
+ it 'does not import the previously imported pull requests', :aggregate_failures do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, pull_request.iid)).to eq(true)
+ end
+ end
end
context 'issues statuses' do
@@ -405,6 +451,24 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integration
expect(importer.errors).to be_empty
end
end
+
+ context 'when issue was already imported' do
+ let(:cache_key) do
+ format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :issues)
+ end
+
+ before do
+ Gitlab::Cache::Import::Caching.set_add(cache_key, sample_issues_statuses.first[:id])
+ end
+
+ it 'does not import previously imported issues', :aggregate_failures do
+ expect { subject.execute }.to change { Issue.count }.by(sample_issues_statuses.size - 1)
+
+ sample_issues_statuses.each do |sample_issues_status|
+ expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, sample_issues_status[:id])).to eq(true)
+ end
+ end
+ end
end
context 'metrics' do
diff --git a/spec/lib/gitlab/cache/json_cache_spec.rb b/spec/lib/gitlab/cache/json_cache_spec.rb
new file mode 100644
index 00000000000..05126319ef9
--- /dev/null
+++ b/spec/lib/gitlab/cache/json_cache_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::JsonCache, feature_category: :shared do
+ let_it_be(:broadcast_message) { create(:broadcast_message) }
+
+ let(:backend) { instance_double(ActiveSupport::Cache::RedisCacheStore).as_null_object }
+ let(:namespace) { 'geo' }
+ let(:key) { 'foo' }
+ let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
+
+ subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
+
+ describe '#active?' do
+ context 'when backend respond to active? method' do
+ it 'delegates to the underlying cache implementation' do
+ backend = instance_double(Gitlab::NullRequestStore, active?: false)
+
+ cache = described_class.new(namespace: namespace, backend: backend)
+
+ expect(cache.active?).to eq(false)
+ end
+ end
+
+ context 'when backend does not respond to active? method' do
+ it 'returns true' do
+ backend = instance_double(ActiveSupport::Cache::RedisCacheStore)
+
+ cache = described_class.new(namespace: namespace, backend: backend)
+
+ expect(cache.active?).to eq(true)
+ end
+ end
+ end
+
+ describe '#expire' do
+ it 'calls delete from the backend on the cache_key' do
+ cache = Class.new(described_class) do
+ def expanded_cache_key(_key)
+ ['_expanded_cache_key_']
+ end
+ end.new(namespace: namespace, backend: backend)
+
+ cache.expire(key)
+
+ expect(backend).to have_received(:delete).with('_expanded_cache_key_')
+ end
+
+ it 'raises an error' do
+ expect { cache.expire(key) }.to raise_error(NoMethodError)
+ end
+ end
+
+ describe '#read' do
+ it 'raises an error' do
+ expect { cache.read(key) }.to raise_error(NoMethodError)
+ end
+ end
+
+ describe '#write' do
+ it 'raises an error' do
+ expect { cache.write(key, true) }.to raise_error(NoMethodError)
+ end
+ end
+
+ describe '#fetch' do
+ it 'raises an error' do
+ expect { cache.fetch(key) }.to raise_error(NoMethodError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb b/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb
new file mode 100644
index 00000000000..c4ec393c3ac
--- /dev/null
+++ b/spec/lib/gitlab/cache/json_caches/json_keyed_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::JsonCaches::JsonKeyed, feature_category: :shared do
+ let_it_be(:broadcast_message) { create(:broadcast_message) }
+
+ let(:backend) { instance_double(ActiveSupport::Cache::RedisCacheStore).as_null_object }
+ let(:namespace) { 'geo' }
+ let(:key) { 'foo' }
+ let(:expanded_key) { "#{namespace}:#{key}" }
+ let(:cache_key_strategy) { :revision }
+ let(:nested_cache_result) { nest_value(broadcast_message) }
+
+ subject(:cache) do
+ described_class.new(namespace: namespace, backend: backend, cache_key_strategy: cache_key_strategy)
+ end
+
+ describe '#expire' do
+ context 'with cache_key concerns' do
+ subject(:expire) { cache.expire(key) }
+
+ it 'uses the expanded_key' do
+ expect(backend).to receive(:delete).with(expanded_key)
+
+ expire
+ end
+
+ context 'when namespace is nil' do
+ let(:namespace) { nil }
+
+ it 'uses the expanded_key' do
+ expect(backend).to receive(:delete).with(key)
+
+ expire
+ end
+ end
+ end
+ end
+
+ describe '#read' do
+ context 'when the cached value is a hash' do
+ it 'returns nil when the data is not in a nested structure' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(%w[a b].to_json)
+
+ expect(cache.read(key)).to be_nil
+ end
+
+ context 'when there are other nested keys in the cache' do
+ it 'only returns the value we are concerned with' do
+ current_cache = { '_other_revision_' => '_other_value_' }.merge(nested_cache_result).to_json
+ allow(backend).to receive(:read).with(expanded_key).and_return(current_cache)
+
+ expect(cache.read(key, BroadcastMessage)).to eq(broadcast_message)
+ end
+ end
+ end
+
+ context 'when cache_key_strategy is unknown' do
+ let(:cache_key_strategy) { 'unknown' }
+
+ it 'raises KeyError' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(json_value(true))
+
+ expect { cache.read(key) }.to raise_error(KeyError)
+ end
+ end
+ end
+
+ describe '#write' do
+ context 'when there is an existing value in the cache' do
+ it 'preserves the existing value when writing a different key' do
+ current_cache = { '_other_revision_' => broadcast_message }
+ allow(backend).to receive(:read).with(expanded_key).and_return(current_cache.to_json)
+
+ cache.write(key, broadcast_message)
+
+ write_cache = current_cache.merge(nested_cache_result)
+ expect(backend).to have_received(:write).with(expanded_key, write_cache.to_json, nil)
+ end
+
+ it 'overwrites existing value when writing the same key' do
+ current_cache = { Gitlab.revision => '_old_value_' }
+ allow(backend).to receive(:read).with(expanded_key).and_return(current_cache.to_json)
+
+ cache.write(key, broadcast_message)
+
+ expect(backend).to have_received(:write).with(expanded_key, json_value(broadcast_message), nil)
+ end
+ end
+
+ context 'when using the version strategy' do
+ let(:cache_key_strategy) { :version }
+
+ it 'writes value to the cache with the given key' do
+ cache.write(key, true)
+
+ write_cache = { "#{Gitlab::VERSION}:#{Rails.version}" => true }.to_json
+ expect(backend).to have_received(:write).with(expanded_key, write_cache, nil)
+ end
+ end
+ end
+
+ it_behaves_like 'Json Cache class'
+
+ def json_value(value)
+ nest_value(value).to_json
+ end
+
+ def nest_value(value)
+ { Gitlab.revision => value }
+ end
+end
diff --git a/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb b/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb
new file mode 100644
index 00000000000..6e98cdd74ce
--- /dev/null
+++ b/spec/lib/gitlab/cache/json_caches/redis_keyed_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::JsonCaches::RedisKeyed, feature_category: :shared do
+ let_it_be(:broadcast_message) { create(:broadcast_message) }
+
+ let(:backend) { instance_double(ActiveSupport::Cache::RedisCacheStore).as_null_object }
+ let(:namespace) { 'geo' }
+ let(:key) { 'foo' }
+ let(:cache_key_strategy) { :revision }
+ let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
+
+ subject(:cache) do
+ described_class.new(namespace: namespace, backend: backend, cache_key_strategy: cache_key_strategy)
+ end
+
+ describe '#read' do
+ context 'when the cached value is true' do
+ it 'parses the cached value' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(true)
+
+ expect(Gitlab::Json).to receive(:parse).with("true").and_call_original
+ expect(cache.read(key, BroadcastMessage)).to eq(true)
+ end
+ end
+
+ context 'when the cached value is false' do
+ it 'parses the cached value' do
+ allow(backend).to receive(:read).with(expanded_key).and_return(false)
+
+ expect(Gitlab::Json).to receive(:parse).with("false").and_call_original
+ expect(cache.read(key, BroadcastMessage)).to eq(false)
+ end
+ end
+ end
+
+ describe '#expire' do
+ context 'with cache_key concerns' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:namespace, :cache_key_strategy, :expanded_key) do
+ nil | :revision | "#{key}:#{Gitlab.revision}"
+ nil | :version | "#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ namespace | :revision | "#{namespace}:#{key}:#{Gitlab.revision}"
+ namespace | :version | "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}"
+ end
+
+ with_them do
+ specify do
+ expect(backend).to receive(:delete).with(expanded_key)
+
+ cache.expire(key)
+ end
+ end
+
+ context 'when cache_key_strategy is unknown' do
+ let(:cache_key_strategy) { 'unknown' }
+
+ it 'raises KeyError' do
+ expect { cache.expire(key) }.to raise_error(KeyError)
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'Json Cache class'
+
+ def json_value(value)
+ value.to_json
+ end
+
+ def version_json_value(value)
+ value.to_json
+ end
+end
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
index 7f535e86d69..7ce267c535f 100644
--- a/spec/lib/gitlab/checks/branch_check_spec.rb
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::BranchCheck do
+RSpec.describe Gitlab::Checks::BranchCheck, feature_category: :source_code_management do
include_context 'change access checks context'
describe '#validate!' do
@@ -46,6 +46,23 @@ RSpec.describe Gitlab::Checks::BranchCheck do
expect { subject.validate! }.not_to raise_error
end
end
+
+ context 'when branch name is invalid' do
+ let(:ref) { 'refs/heads/-wrong' }
+
+ it 'prohibits branches with an invalid name' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You cannot create a branch with an invalid name.')
+ end
+
+ context 'deleting an invalid branch' do
+ let(:ref) { 'refs/heads/-wrong' }
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+
+ it "doesn't prohibit the deletion of an invalid branch name" do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
end
context 'protected branches check' do
diff --git a/spec/lib/gitlab/checks/diff_check_spec.rb b/spec/lib/gitlab/checks/diff_check_spec.rb
index 0845c746545..dd467537a4f 100644
--- a/spec/lib/gitlab/checks/diff_check_spec.rb
+++ b/spec/lib/gitlab/checks/diff_check_spec.rb
@@ -24,11 +24,42 @@ RSpec.describe Gitlab::Checks::DiffCheck, feature_category: :source_code_managem
end
end
+ context 'when commits include merge commit' do
+ before do
+ allow(project.repository).to receive(:new_commits).and_return([project.repository.commit(merge_commit)])
+ allow(subject).to receive(:should_run_validations?).and_return(true)
+ allow(subject).to receive(:validate_path)
+ allow(subject).to receive(:validate_file_paths)
+ subject.validate!
+ end
+
+ context 'when merge commit does not include additional changes' do
+ let(:merge_commit) { '2b298117a741cdb06eb48df2c33f1390cf89f7e8' }
+
+ it 'checks the additional changes' do
+ expect(subject).to have_received(:validate_file_paths).with([])
+ end
+ end
+
+ context 'when merge commit includes additional changes' do
+ let(:merge_commit) { '1ada92f78a19f27cb442a0a205f1c451a3a15432' }
+ let(:file_paths) { ['files/locked/baz.lfs'] }
+
+ it 'checks the additional changes' do
+ expect(subject).to have_received(:validate_file_paths).with(file_paths)
+ end
+ end
+ end
+
context 'when commits is not empty' do
+ let(:new_commits) do
+ from = 'be93687618e4b132087f430a4d8fc3a609c9b77c'
+ to = '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51'
+ project.repository.commits_between(from, to)
+ end
+
before do
- allow(project.repository).to receive(:new_commits).and_return(
- project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
- )
+ allow(project.repository).to receive(:new_commits).and_return(new_commits)
end
context 'when deletion is true' do
@@ -74,6 +105,52 @@ RSpec.describe Gitlab::Checks::DiffCheck, feature_category: :source_code_managem
expect { subject.validate! }.not_to raise_error
end
end
+
+ context 'when a merge commit merged a file locked by another user' do
+ let(:new_commits) do
+ project.repository.commits_by(oids: %w[
+ 760c58db5a6f3b64ad7e3ff6b3c4a009da7d9b33
+ 2b298117a741cdb06eb48df2c33f1390cf89f7e8
+ ])
+ end
+
+ before do
+ create(:lfs_file_lock, user: owner, project: project, path: 'files/locked/foo.lfs')
+ create(:lfs_file_lock, user: user, project: project, path: 'files/locked/bar.lfs')
+ end
+
+ it "doesn't raise any error" do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+ context 'when a merge commit includes additional file locked by another user' do
+ # e.g. when merging the user added an additional change.
+ # This merge commit: https://gitlab.com/gitlab-org/gitlab-test/-/commit/1ada92f
+ # merges `files/locked/bar.lfs` and also adds a new file
+ # `files/locked/baz.lfs`. In this case we ignore `files/locked/bar.lfs`
+ # as it is already detected in the commit c41e12c, however, we do
+ # detect the new `files/locked/baz.lfs` file.
+ #
+ let(:new_commits) do
+ project.repository.commits_by(oids: %w[
+ 760c58db5a6f3b64ad7e3ff6b3c4a009da7d9b33
+ 2b298117a741cdb06eb48df2c33f1390cf89f7e8
+ c41e12c387b4e0e41bfc17208252d6a6430f2fcd
+ 1ada92f78a19f27cb442a0a205f1c451a3a15432
+ ])
+ end
+
+ before do
+ create(:lfs_file_lock, user: owner, project: project, path: 'files/locked/foo.lfs')
+ create(:lfs_file_lock, user: user, project: project, path: 'files/locked/bar.lfs')
+ create(:lfs_file_lock, user: owner, project: project, path: 'files/locked/baz.lfs')
+ end
+
+ it "does raise an error" do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "The path 'files/locked/baz.lfs' is locked in Git LFS by #{owner.name}")
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index 49e02fe5cec..8cdee727d3d 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -6,14 +6,34 @@ RSpec.describe Gitlab::Checks::ForcePush do
let_it_be(:project) { create(:project, :repository) }
describe '.force_push?' do
- it 'returns false if the repo is empty' do
- allow(project).to receive(:empty_repo?).and_return(true)
+ let(:old_rev) { 'HEAD~' }
+ let(:new_rev) { 'HEAD' }
- expect(described_class.force_push?(project, 'HEAD', 'HEAD~')).to be(false)
+ subject(:force_push) { described_class.force_push?(project, old_rev, new_rev) }
+
+ context 'when the repo is empty' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(true)
+ end
+
+ it 'returns false' do
+ expect(force_push).to be(false)
+ end
end
- it 'checks if old rev is an anchestor' do
- expect(described_class.force_push?(project, 'HEAD', 'HEAD~')).to be(true)
+ context 'when new rev is a descendant of old rev' do
+ it 'returns false' do
+ expect(force_push).to be(false)
+ end
+ end
+
+ context 'when new rev is not a descendant of old rev' do
+ let(:old_rev) { 'HEAD' }
+ let(:new_rev) { 'HEAD~' }
+
+ it 'returns true' do
+ expect(force_push).to be(true)
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
index 813dc15e79f..76a596e1db3 100644
--- a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
+++ b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do
context 'when artifact archive size is greater than the limit' do
let(:expected_error) do
- "Artifacts archive for job `#{job.name}` is too large: max 1 KB"
+ "Artifacts archive for job `#{job.name}` is too large: max 1 KiB"
end
before do
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do
context 'when metadata entry shows size greater than the limit' do
let(:expected_error) do
- "Artifacts archive for job `#{job.name}` is too large: max 5 MB"
+ "Artifacts archive for job `#{job.name}` is too large: max 5 MiB"
end
before do
diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb
index d4a2af0015f..6047eb1b1e0 100644
--- a/spec/lib/gitlab/ci/build/context/build_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/build_spec.rb
@@ -14,28 +14,12 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co
it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
it { is_expected.to include('CI_JOB_NAME' => 'some-job') }
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') }
- end
-
context 'without passed build-specific attributes' do
let(:context) { described_class.new(pipeline) }
it { is_expected.to include('CI_JOB_NAME' => nil) }
it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') }
- end
end
context 'when environment:name is provided' do
diff --git a/spec/lib/gitlab/ci/build/context/global_spec.rb b/spec/lib/gitlab/ci/build/context/global_spec.rb
index 328b5eb62fa..cf511cf1560 100644
--- a/spec/lib/gitlab/ci/build/context/global_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/global_spec.rb
@@ -15,14 +15,6 @@ RSpec.describe Gitlab::Ci::Build::Context::Global, feature_category: :pipeline_c
it { is_expected.not_to have_key('CI_JOB_NAME') }
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it { is_expected.not_to have_key('CI_BUILD_REF_NAME') }
- end
-
context 'with passed yaml variables' do
let(:yaml_variables) { [{ key: 'SUPPORTED', value: 'parsed', public: true }] }
diff --git a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
index baabab73ea2..ac66fc3b773 100644
--- a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
+++ b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
+RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace, feature_category: :continuous_delivery do
describe '#unmet?' do
let(:build) { create(:ci_build) }
@@ -17,15 +17,13 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'build has a deployment' do
- let!(:deployment) { create(:deployment, deployable: build, cluster: cluster) }
-
context 'and a cluster to deploy to' do
- let(:cluster) { create(:cluster, :group) }
+ let!(:deployment) { create(:deployment, :on_cluster, deployable: build) }
it { is_expected.to be_truthy }
context 'and the cluster is not managed' do
- let(:cluster) { create(:cluster, :not_managed, projects: [build.project]) }
+ let!(:deployment) { create(:deployment, :on_cluster_not_managed, deployable: build) }
it { is_expected.to be_falsey }
end
@@ -63,8 +61,8 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
subject { prerequisite.complete! }
context 'completion is required' do
- let(:cluster) { create(:cluster, :group) }
- let(:deployment) { create(:deployment, cluster: cluster) }
+ let(:cluster) { deployment.cluster }
+ let(:deployment) { create(:deployment, :on_cluster) }
let(:service) { double(execute: true) }
let(:kubernetes_namespace) { double }
@@ -84,12 +82,12 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
it 'creates a namespace using a new record' do
expect(Clusters::BuildKubernetesNamespaceService)
.to receive(:new)
- .with(cluster, environment: deployment.environment)
+ .with(deployment.cluster, environment: deployment.environment)
.and_return(namespace_builder)
expect(Clusters::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
- .with(cluster: cluster, kubernetes_namespace: kubernetes_namespace)
+ .with(cluster: deployment.cluster, kubernetes_namespace: kubernetes_namespace)
.and_return(service)
expect(service).to receive(:execute).once
@@ -112,12 +110,12 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
it 'creates a namespace' do
expect(Clusters::BuildKubernetesNamespaceService)
.to receive(:new)
- .with(cluster, environment: deployment.environment)
+ .with(deployment.cluster, environment: deployment.environment)
.and_return(namespace_builder)
expect(Clusters::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
- .with(cluster: cluster, kubernetes_namespace: kubernetes_namespace)
+ .with(cluster: deployment.cluster, kubernetes_namespace: kubernetes_namespace)
.and_return(service)
expect(service).to receive(:execute).once
@@ -150,7 +148,7 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
expect(Clusters::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
- .with(cluster: cluster, kubernetes_namespace: kubernetes_namespace)
+ .with(cluster: deployment.cluster, kubernetes_namespace: kubernetes_namespace)
.and_return(service)
subject
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index 1ece0f6b7b9..9f191fed581 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Build::Rules do
+RSpec.describe Gitlab::Ci::Build::Rules, feature_category: :pipeline_composition do
let_it_be(:pipeline) { create(:ci_pipeline) }
let_it_be(:ci_build) { build(:ci_build, pipeline: pipeline) }
@@ -80,37 +80,37 @@ RSpec.describe Gitlab::Ci::Build::Rules do
context 'with nil rules' do
let(:rule_list) { nil }
- it { is_expected.to eq(described_class::Result.new('on_success')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success')) }
context 'and when:manual set as the default' do
let(:rules) { described_class.new(rule_list, default_when: 'manual') }
- it { is_expected.to eq(described_class::Result.new('manual')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'manual')) }
end
end
context 'with no rules' do
let(:rule_list) { [] }
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
context 'and when:manual set as the default' do
let(:rules) { described_class.new(rule_list, default_when: 'manual') }
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
end
end
context 'with one rule without any clauses' do
let(:rule_list) { [{ when: 'manual', allow_failure: true }] }
- it { is_expected.to eq(described_class::Result.new('manual', nil, true, nil)) }
+ it { is_expected.to eq(described_class::Result.new(when: 'manual', allow_failure: true)) }
end
context 'with one matching rule' do
let(:rule_list) { [{ if: '$VAR == null', when: 'always' }] }
- it { is_expected.to eq(described_class::Result.new('always')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'always')) }
end
context 'with two matching rules' do
@@ -122,7 +122,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
it 'returns the value of the first matched rule in the list' do
- expect(subject).to eq(described_class::Result.new('delayed', '1 day'))
+ expect(subject).to eq(described_class::Result.new(when: 'delayed', start_in: '1 day'))
end
end
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
]
end
- it { is_expected.to eq(described_class::Result.new('always')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'always')) }
end
context 'with a matching and non-matching rule' do
@@ -145,7 +145,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
]
end
- it { is_expected.to eq(described_class::Result.new('delayed', '1 day')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'delayed', start_in: '1 day')) }
end
context 'with non-matching rules' do
@@ -156,13 +156,13 @@ RSpec.describe Gitlab::Ci::Build::Rules do
]
end
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
context 'and when:manual set as the default' do
let(:rules) { described_class.new(rule_list, default_when: 'manual') }
it 'does not return the default when:' do
- expect(subject).to eq(described_class::Result.new('never'))
+ expect(subject).to eq(described_class::Result.new(when: 'never'))
end
end
end
@@ -171,25 +171,29 @@ RSpec.describe Gitlab::Ci::Build::Rules do
context 'with matching rule' do
let(:rule_list) { [{ if: '$VAR == null', allow_failure: true }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, true, nil)) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success', allow_failure: true)) }
end
context 'with non-matching rule' do
let(:rule_list) { [{ if: '$VAR != null', allow_failure: true }] }
- it { is_expected.to eq(described_class::Result.new('never')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'never')) }
end
end
context 'with needs' do
- context 'when single needs is specified' do
+ context 'when single need is specified' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{ name: 'test', artifacts: true, optional: false }] }]
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil,
- [{ name: 'test', artifacts: true, optional: false }], nil))
+ is_expected.to eq(described_class::Result.new(
+ when: 'on_success',
+ needs: [{ name: 'test',
+ artifacts: true,
+ optional: false }]
+ ))
}
end
@@ -201,32 +205,43 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil,
- [{ name: 'test', artifacts: true, optional: false },
- { name: 'rspec', artifacts: true, optional: false }], nil))
+ is_expected.to eq(described_class::Result.new(
+ when: 'on_success',
+ needs: [{ name: 'test',
+ artifacts: true,
+ optional: false },
+ { name: 'rspec',
+ artifacts: true,
+ optional: false }]))
}
end
context 'when there are no needs specified' do
let(:rule_list) { [{ if: '$VAR == null' }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil)) }
+ it {
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
+ }
end
+      context 'when need is specified with additional attributes' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{
- artifacts: true,
+ artifacts: false,
name: 'test',
- optional: false,
+ optional: true,
when: 'never'
}] }]
end
it {
is_expected.to eq(
- described_class::Result.new('on_success', nil, nil, nil,
- [{ artifacts: true, name: 'test', optional: false, when: 'never' }], nil))
+ described_class::Result.new(
+ when: 'on_success',
+ needs: [{ artifacts: false,
+ name: 'test',
+ optional: true,
+ when: 'never' }]))
}
end
@@ -236,13 +251,13 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
context 'with needs' do
- context 'when single needs is specified' do
+ context 'when single need is specified' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{ name: 'test', artifacts: true, optional: false }] }]
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil))
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
}
end
@@ -254,29 +269,30 @@ RSpec.describe Gitlab::Ci::Build::Rules do
end
it {
- is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil))
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
}
end
context 'when there are no needs specified' do
let(:rule_list) { [{ if: '$VAR == null' }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, nil, nil, nil, nil)) }
+ it {
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
+ }
end
+        context 'when need is specified with additional attributes' do
let(:rule_list) do
[{ if: '$VAR == null', needs: [{
- artifacts: true,
+ artifacts: false,
name: 'test',
- optional: false,
+ optional: true,
when: 'never'
}] }]
end
it {
- is_expected.to eq(
- described_class::Result.new('on_success', nil, nil, nil, nil, nil))
+ is_expected.to eq(described_class::Result.new(when: 'on_success'))
}
end
end
@@ -287,7 +303,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
context 'with matching rule' do
let(:rule_list) { [{ if: '$VAR == null', variables: { MY_VAR: 'my var' } }] }
- it { is_expected.to eq(described_class::Result.new('on_success', nil, nil, { MY_VAR: 'my var' })) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success', variables: { MY_VAR: 'my var' })) }
end
end
@@ -301,7 +317,7 @@ RSpec.describe Gitlab::Ci::Build::Rules do
)
end
- it { is_expected.to eq(described_class::Result.new('on_success')) }
+ it { is_expected.to eq(described_class::Result.new(when: 'on_success')) }
end
end
@@ -313,7 +329,12 @@ RSpec.describe Gitlab::Ci::Build::Rules do
let(:needs) { nil }
subject(:result) do
- Gitlab::Ci::Build::Rules::Result.new(when_value, start_in, allow_failure, variables, needs)
+ Gitlab::Ci::Build::Rules::Result.new(
+ when: when_value,
+ start_in: start_in,
+ allow_failure: allow_failure,
+ variables: variables,
+ needs: needs)
end
describe '#build_attributes' do
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 82db116fa0d..7e869826522 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -82,6 +82,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
'pull-push' | 'pull-push'
'push' | 'push'
'pull' | 'pull'
+ '$VARIABLE' | '$VARIABLE'
'unknown' | 'unknown' # invalid
end
@@ -145,6 +146,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
'pull-push' | true
'push' | true
'pull' | true
+ '$VARIABLE' | true
'unknown' | false
end
@@ -280,7 +282,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
let(:config) { { policy: 'unknown' } }
it 'returns error' do
- is_expected.to include('cache policy should be one of: pull-push, push, pull')
+ is_expected.to include('cache policy should be a variable or one of: pull-push, push, pull')
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/id_token_spec.rb b/spec/lib/gitlab/ci/config/entry/id_token_spec.rb
index 12585d662ec..d8a3c98e575 100644
--- a/spec/lib/gitlab/ci/config/entry/id_token_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/id_token_spec.rb
@@ -15,6 +15,28 @@ RSpec.describe Gitlab::Ci::Config::Entry::IdToken do
end
end
+ context 'when given `aud` is a variable' do
+ it 'is valid' do
+ config = { aud: '$WATHEVER' }
+ id_token = described_class.new(config)
+
+ id_token.compose!
+
+ expect(id_token).to be_valid
+ end
+ end
+
+ context 'when given `aud` includes a variable' do
+ it 'is valid' do
+ config = { aud: 'blah-$WATHEVER' }
+ id_token = described_class.new(config)
+
+ id_token.compose!
+
+ expect(id_token).to be_valid
+ end
+ end
+
context 'when given `aud` as an array' do
it 'is valid and concatenates the values' do
config = { aud: ['https://gitlab.com', 'https://aws.com'] }
@@ -27,6 +49,17 @@ RSpec.describe Gitlab::Ci::Config::Entry::IdToken do
end
end
+ context 'when given `aud` as an array with variables' do
+ it 'is valid and concatenates the values' do
+ config = { aud: ['$WATHEVER', 'blah-$WATHEVER'] }
+ id_token = described_class.new(config)
+
+ id_token.compose!
+
+ expect(id_token).to be_valid
+ end
+ end
+
context 'when not given an `aud`' do
it 'is invalid' do
config = {}
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index 6116fbced2b..10c1d92e209 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -3,7 +3,7 @@
require 'fast_spec_helper'
require_dependency 'active_model'
-RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
+RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category: :pipeline_composition do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.value(config)
@@ -24,6 +24,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
let(:config) { { if: '$THIS || $THAT' } }
it { is_expected.to be_valid }
+
+ context 'with when:' do
+ let(:config) { { if: '$THIS || $THAT', when: 'never' } }
+
+ it { is_expected.to be_valid }
+ end
end
context 'when specifying an exists: clause' do
@@ -90,6 +96,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do
it 'returns the config' do
expect(subject).to eq(if: '$THIS || $THAT')
end
+
+ context 'with when:' do
+ let(:config) { { if: '$THIS || $THAT', when: 'never' } }
+
+ it 'returns the config' do
+ expect(subject).to eq(if: '$THIS || $THAT', when: 'never')
+ end
+ end
end
context 'when specifying an exists: clause' do
diff --git a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
index 087dacd5ef0..1f4586bd5a9 100644
--- a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: :
context 'when job has artifacts exceeding the max allowed size' do
let(:expected_error) do
- "Artifacts archive for job `generator` is too large: max 1 KB"
+ "Artifacts archive for job `generator` is too large: max 1 KiB"
end
before do
diff --git a/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb
index 5195567ebb4..4da3e7e51a7 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/filter_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Filter, feature_category: :
describe '#process' do
let(:locations) do
[{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] },
+ { remote: 'https://testing.com/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1', when: 'never' }] },
{ remote: 'https://example.com/.gitlab-ci.yml', rules: [{ if: '$VARIABLE2' }] }]
end
@@ -28,5 +29,18 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Filter, feature_category: :
[{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] }]
)
end
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it 'filters locations according to rules ignoring when:' do
+ is_expected.to eq(
+ [{ local: 'config/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1' }] },
+ { remote: 'https://testing.com/.gitlab-ci.yml', rules: [{ if: '$VARIABLE1', when: 'never' }] }]
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
index 1ee46daa196..e7dd5bd5079 100644
--- a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb
@@ -147,43 +147,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category:
expect(access_check_queries.values.sum).to eq(2)
end
- context 'when the FF ci_batch_project_includes_context is disabled' do
- before do
- stub_feature_flags(ci_batch_project_includes_context: false)
- end
-
- it 'returns an array of file objects' do
- expect(process.map(&:location)).to contain_exactly(
- 'myfolder/file1.yml', 'myfolder/file2.yml', 'myfolder/file3.yml',
- 'myfolder/file1.yml', 'myfolder/file2.yml'
- )
- end
-
- it 'adds files to the expandset' do
- expect { process }.to change { context.expandset.count }.by(5)
- end
-
- it 'calls Gitaly for all files', :request_store do
- files # calling this to load project creations and the `project.commit.id` call
-
- # 5 for the sha check, 2 for the files in batch
- expect { process }.to change { Gitlab::GitalyClient.get_request_count }.by(7)
- end
-
- it 'queries without batch', :use_sql_query_cache do
- files # calling this to load project creations and the `project.commit.id` call
-
- queries = ActiveRecord::QueryRecorder.new(skip_cached: false) { process }
- projects_queries = queries.occurrences_starting_with('SELECT "projects"')
- access_check_queries = queries.occurrences_starting_with(
- 'SELECT MAX("project_authorizations"."access_level")'
- )
-
- expect(projects_queries.values.sum).to eq(5)
- expect(access_check_queries.values.sum).to eq(5)
- end
- end
-
context 'when a project is missing' do
let(:files) do
[
@@ -203,20 +166,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category:
expect(process.all?(&:valid?)).to be_falsey
end
-
- context 'when the FF ci_batch_project_includes_context is disabled' do
- before do
- stub_feature_flags(ci_batch_project_includes_context: false)
- end
-
- it 'returns an array of file objects' do
- expect(process.map(&:location)).to contain_exactly(
- 'myfolder/file1.yml', 'myfolder/file2.yml'
- )
-
- expect(process.all?(&:valid?)).to be_falsey
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb
index cc73338b5a8..1ba5caa1d4b 100644
--- a/spec/lib/gitlab/ci/config/external/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb
@@ -3,43 +3,42 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_composition do
- let(:rule_hashes) {}
+ # Remove `project` property when FF `ci_support_include_rules_when_never` is removed
+ let(:context) { double(variables_hash: {}, project: nil) }
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
subject(:rules) { described_class.new(rule_hashes) }
describe '#evaluate' do
- let(:context) { double(variables_hash: {}) }
-
subject(:result) { rules.evaluate(context).pass? }
context 'when there is no rule' do
+ let(:rule_hashes) {}
+
it { is_expected.to eq(true) }
end
- context 'when there is a rule with if' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
+ shared_examples 'when there is a rule with if' do |rule_matched_result = true, rule_not_matched_result = false|
+ # Remove this `before` block when FF `ci_support_include_rules_when_never` is removed
+ before do
+ allow(context).to receive(:project).and_return(nil)
+ end
context 'when the rule matches' do
let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) }
- it { is_expected.to eq(true) }
+ it { is_expected.to eq(rule_matched_result) }
end
context 'when the rule does not match' do
let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) }
- it { is_expected.to eq(false) }
+ it { is_expected.to eq(rule_not_matched_result) }
end
end
- context 'when there is a rule with exists' do
+ shared_examples 'when there is a rule with exists' do |file_exists_result = true, file_not_exists_result = false|
let(:project) { create(:project, :repository) }
- let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['test.md']) }
- let(:rule_hashes) { [{ exists: 'Dockerfile' }] }
-
- context 'when the file does not exist' do
- it { is_expected.to eq(false) }
- end
context 'when the file exists' do
let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['Dockerfile']) }
@@ -48,16 +47,111 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_
project.repository.create_file(project.first_owner, 'Dockerfile', "commit", message: 'test', branch_name: "master")
end
- it { is_expected.to eq(true) }
+ it { is_expected.to eq(file_exists_result) }
end
+
+ context 'when the file does not exist' do
+ let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['test.md']) }
+
+ it { is_expected.to eq(file_not_exists_result) }
+ end
+ end
+
+ it_behaves_like 'when there is a rule with if'
+
+ context 'when there is a rule with exists' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile' }] }
+
+ it_behaves_like 'when there is a rule with exists'
end
context 'when there is a rule with if and when' do
- let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+ context 'with when: never' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'never' }] }
- it 'raises an error' do
- expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
- 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
+ it_behaves_like 'when there is a rule with if', false, false
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with if'
+ end
+ end
+
+ context 'with when: always' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'always' }] }
+
+ it_behaves_like 'when there is a rule with if'
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with if'
+ end
+ end
+
+ context 'with when: <invalid string>' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}')
+ end
+ end
+
+ context 'with when: null' do
+ let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: nil }] }
+
+ it_behaves_like 'when there is a rule with if'
+ end
+ end
+
+ context 'when there is a rule with exists and when' do
+ context 'with when: never' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'never' }] }
+
+ it_behaves_like 'when there is a rule with exists', false, false
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with exists'
+ end
+ end
+
+ context 'with when: always' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'always' }] }
+
+ it_behaves_like 'when there is a rule with exists'
+
+ context 'when FF `ci_support_include_rules_when_never` is disabled' do
+ before do
+ stub_feature_flags(ci_support_include_rules_when_never: false)
+ end
+
+ it_behaves_like 'when there is a rule with exists'
+ end
+ end
+
+ context 'with when: <invalid string>' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] }
+
+ it 'raises an error' do
+ expect { result }.to raise_error(described_class::InvalidIncludeRulesError,
+ 'invalid include rule: {:exists=>"Dockerfile", :when=>"on_success"}')
+ end
+ end
+
+ context 'with when: null' do
+ let(:rule_hashes) { [{ exists: 'Dockerfile', when: nil }] }
+
+ it_behaves_like 'when there is a rule with exists'
end
end
diff --git a/spec/lib/gitlab/ci/config/external/interpolator_spec.rb b/spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb
index fe6f97a66a5..726ed6d95a0 100644
--- a/spec/lib/gitlab/ci/config/external/interpolator_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/interpolator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::External::Interpolator, feature_category: :pipeline_composition do
+RSpec.describe Gitlab::Ci::Config::Yaml::Interpolator, feature_category: :pipeline_composition do
let_it_be(:project) { create(:project) }
let(:ctx) { instance_double(Gitlab::Ci::Config::External::Context, project: project, user: build(:user, id: 1234)) }
diff --git a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
new file mode 100644
index 00000000000..1e417bcd8af
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Yaml::Loader, feature_category: :pipeline_composition do
+ describe '#to_result' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:result) { described_class.new(yaml, project: project).to_result }
+
+ context 'when syntax is invalid' do
+ let(:yaml) { 'some: invalid: syntax' }
+
+ it 'returns an invalid result object' do
+ expect(result).not_to be_valid
+ expect(result.error).to be_a ::Gitlab::Config::Loader::FormatError
+ end
+ end
+
+ context 'when the first document is a header' do
+ context 'with explicit document start marker' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as header and the second as content' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result.header).to eq({ spec: nil })
+ expect(result.content).to eq({ b: 2 })
+ end
+ end
+ end
+
+ context 'when first document is empty' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as header and the second as content' do
+ expect(result).not_to have_header
+ end
+ end
+
+ context 'when first document is an empty hash' do
+ let(:yaml) do
+ <<~YAML
+ {}
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'returns second document as a content' do
+ expect(result).not_to have_header
+ expect(result.content).to eq({ b: 2 })
+ end
+ end
+
+ context 'when first an array' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ - a
+ - b
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as header and the second as content' do
+ expect(result).not_to have_header
+ end
+ end
+
+ context 'when the first document is not a header' do
+ let(:yaml) do
+ <<~YAML
+ a: 1
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as content for backwards compatibility' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+
+ context 'with explicit document start marker' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ a: 1
+ ---
+ b: 2
+ YAML
+ end
+
+ it 'considers the first document as content for backwards compatibility' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+ end
+ end
+
+ context 'when the first document is not a header and second document is empty' do
+ let(:yaml) do
+ <<~YAML
+ a: 1
+ ---
+ YAML
+ end
+
+ it 'considers the first document as content' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+
+ context 'with explicit document start marker' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ a: 1
+ ---
+ YAML
+ end
+
+ it 'considers the first document as content' do
+ expect(result).to be_valid
+ expect(result.error).to be_nil
+ expect(result).not_to have_header
+ expect(result.content).to eq({ a: 1 })
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/yaml/result_spec.rb b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
index 72d96349668..d17e0609ef6 100644
--- a/spec/lib/gitlab/ci/config/yaml/result_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/result_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Result, feature_category: :pipeline_com
result = described_class.new(config: [nil, { a: 1 }])
expect(result).not_to have_header
- expect(result.content).to be_nil
+ expect(result.content).to be_empty
end
end
diff --git a/spec/lib/gitlab/ci/config/yaml_spec.rb b/spec/lib/gitlab/ci/config/yaml_spec.rb
index beb872071d2..3576dd481c6 100644
--- a/spec/lib/gitlab/ci/config/yaml_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition do
describe '.load!' do
- it 'loads a single-doc YAML file' do
+ it 'loads a YAML file' do
yaml = <<~YAML
image: 'image:1.0'
texts:
@@ -26,30 +26,6 @@ RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition
})
end
- it 'loads the first document from a multi-doc YAML file' do
- yaml = <<~YAML
- spec:
- inputs:
- test_input:
- ---
- image: 'image:1.0'
- texts:
- nested_key: 'value1'
- more_text:
- more_nested_key: 'value2'
- YAML
-
- config = described_class.load!(yaml)
-
- expect(config).to eq({
- spec: {
- inputs: {
- test_input: nil
- }
- }
- })
- end
-
context 'when YAML is invalid' do
let(:yaml) { 'some: invalid: syntax' }
@@ -58,58 +34,6 @@ RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition
.to raise_error ::Gitlab::Config::Loader::FormatError, /mapping values are not allowed in this context/
end
end
-
- context 'when ci_multi_doc_yaml is disabled' do
- before do
- stub_feature_flags(ci_multi_doc_yaml: false)
- end
-
- it 'loads a single-doc YAML file' do
- yaml = <<~YAML
- image: 'image:1.0'
- texts:
- nested_key: 'value1'
- more_text:
- more_nested_key: 'value2'
- YAML
-
- config = described_class.load!(yaml)
-
- expect(config).to eq({
- image: 'image:1.0',
- texts: {
- nested_key: 'value1',
- more_text: {
- more_nested_key: 'value2'
- }
- }
- })
- end
-
- it 'loads the first document from a multi-doc YAML file' do
- yaml = <<~YAML
- spec:
- inputs:
- test_input:
- ---
- image: 'image:1.0'
- texts:
- nested_key: 'value1'
- more_text:
- more_nested_key: 'value2'
- YAML
-
- config = described_class.load!(yaml)
-
- expect(config).to eq({
- spec: {
- inputs: {
- test_input: nil
- }
- }
- })
- end
- end
end
describe '.load_result!' do
diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb
index 528be4b5da7..15be67329a8 100644
--- a/spec/lib/gitlab/ci/jwt_v2_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb
@@ -111,6 +111,80 @@ RSpec.describe Gitlab::Ci::JwtV2, feature_category: :continuous_integration do
expect(payload[:sha]).to eq(pipeline.sha)
end
end
+
+ describe 'ci_config_ref_uri' do
+ let(:project_config) do
+ instance_double(
+ Gitlab::Ci::ProjectConfig,
+ url: 'gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml',
+ source: :repository_source
+ )
+ end
+
+ before do
+ allow(Gitlab::Ci::ProjectConfig).to receive(:new).with(
+ project: project,
+ sha: pipeline.sha,
+ pipeline_source: pipeline.source.to_sym,
+ pipeline_source_bridge: pipeline.source_bridge
+ ).and_return(project_config)
+ end
+
+ it 'joins project_config.url and pipeline.source_ref_path with @' do
+ expect(payload[:ci_config_ref_uri]).to eq('gitlab.com/gitlab-org/gitlab//.gitlab-ci.yml' \
+ '@refs/heads/auto-deploy-2020-03-19')
+ end
+
+ context 'when project config is nil' do
+ before do
+ allow(Gitlab::Ci::ProjectConfig).to receive(:new).and_return(nil)
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+
+ context 'when ProjectConfig#url raises an error' do
+ before do
+ allow(project_config).to receive(:url).and_raise(RuntimeError)
+ end
+
+ it 'raises the same error' do
+ expect { payload }.to raise_error(RuntimeError)
+ end
+
+ context 'in production' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+ end
+
+ context 'when ci_jwt_v2_ci_config_ref_uri_claim flag is disabled' do
+ before do
+ stub_feature_flags(ci_jwt_v2_ref_uri_claim: false)
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+
+ context 'when config source is not repository' do
+ before do
+ allow(project_config).to receive(:source).and_return(:auto_devops_source)
+ end
+
+ it 'is nil' do
+ expect(payload[:ci_config_ref_uri]).to be_nil
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 421aa29f860..dc16ddf4e0e 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -183,55 +183,44 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
describe 'parsing finding.name' do
let(:artifact) { build(:ci_job_artifact, :common_security_report_with_blank_names) }
- context 'when message is provided' do
- it 'sets message from the report as a finding name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1020' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['message']
+ context 'when name is provided' do
+ it 'sets name from the report as a name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
expect(finding.name).to eq(expected_name)
end
end
- context 'when message is not provided' do
- context 'and name is provided' do
- it 'sets name from the report as a name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
- expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
+ context 'when name is not provided' do
+ context 'when location does not exist' do
+ let(:location) { nil }
- expect(finding.name).to eq(expected_name)
+ it 'returns only identifier name' do
+ finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ expect(finding.name).to eq("CVE-2017-11429")
end
end
- context 'and name is not provided' do
- context 'when location does not exist' do
- let(:location) { nil }
-
- it 'returns only identifier name' do
+ context 'when location exists' do
+ context 'when CVE identifier exists' do
+ it 'combines identifier with location to create name' do
finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
- expect(finding.name).to eq("CVE-2017-11429")
+ expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
end
end
- context 'when location exists' do
- context 'when CVE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
- expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
- end
- end
-
- context 'when CWE identifier exists' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
- expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
- end
+ context 'when CWE identifier exists' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
+ expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
end
+ end
- context 'when neither CVE nor CWE identifier exist' do
- it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
- expect(finding.name).to eq("other-2017-11429 in yarn.lock")
- end
+ context 'when neither CVE nor CWE identifier exist' do
+ it 'combines identifier with location to create name' do
+ finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
+ expect(finding.name).to eq("other-2017-11429 in yarn.lock")
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 9d5a9bc8058..5f87e0ccc33 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -909,30 +909,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co
end
end
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- context 'with an explicit `when: on_failure`' do
- where(:rule_set) do
- [
- [[{ if: '$CI_JOB_NAME == "rspec" && $VAR == null', when: 'on_failure' }]],
- [[{ if: '$VARIABLE != null', when: 'delayed', start_in: '1 day' }, { if: '$CI_JOB_NAME == "rspec"', when: 'on_failure' }]],
- [[{ if: '$VARIABLE == "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$CI_BUILD_NAME == "rspec"', when: 'on_failure' }]]
- ]
- end
-
- with_them do
- it { is_expected.to be_included }
-
- it 'correctly populates when:' do
- expect(seed_build.attributes).to include(when: 'on_failure')
- end
- end
- end
- end
-
context 'with an explicit `when: delayed`' do
where(:rule_set) do
[
diff --git a/spec/lib/gitlab/ci/project_config_spec.rb b/spec/lib/gitlab/ci/project_config_spec.rb
index c4b179c9ef5..13ef0939ddd 100644
--- a/spec/lib/gitlab/ci/project_config_spec.rb
+++ b/spec/lib/gitlab/ci/project_config_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::ProjectConfig do
- let(:project) { create(:project, :empty_repo, ci_config_path: ci_config_path) }
+RSpec.describe Gitlab::Ci::ProjectConfig, feature_category: :pipeline_composition do
+ let_it_be(:project) { create(:project, :empty_repo) }
let(:sha) { '123456' }
let(:content) { nil }
let(:source) { :push }
@@ -14,9 +14,13 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
custom_content: content, pipeline_source: source, pipeline_source_bridge: bridge)
end
+ before do
+ project.ci_config_path = ci_config_path
+ end
+
context 'when bridge job is passed in as parameter' do
let(:ci_config_path) { nil }
- let(:bridge) { create(:ci_bridge) }
+ let(:bridge) { build_stubbed(:ci_bridge) }
before do
allow(bridge).to receive(:yaml_for_downstream).and_return('the-yaml')
@@ -25,6 +29,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns the content already available in command' do
expect(config.source).to eq(:bridge_source)
expect(config.content).to eq('the-yaml')
+ expect(config.url).to be_nil
end
end
@@ -48,6 +53,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the local custom file' do
expect(config.source).to eq(:repository_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to eq("localhost/#{project.full_path}//path/to/config.yml")
end
end
@@ -64,6 +70,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the remote config' do
expect(config.source).to eq(:remote_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to be_nil
end
end
@@ -81,6 +88,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the path to another repository' do
expect(config.source).to eq(:external_project_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to be_nil
end
context 'when path specifies a refname' do
@@ -122,6 +130,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the canonical CI config file' do
expect(config.source).to eq(:repository_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to eq("localhost/#{project.full_path}//.gitlab-ci.yml")
end
end
@@ -142,6 +151,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns root config including the auto-devops template' do
expect(config.source).to eq(:auto_devops_source)
expect(config.content).to eq(config_content_result)
+ expect(config.url).to be_nil
end
end
@@ -159,6 +169,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns the parameter content' do
expect(config.source).to eq(:parameter_source)
expect(config.content).to eq(content)
+ expect(config.url).to be_nil
end
end
@@ -172,6 +183,7 @@ RSpec.describe Gitlab::Ci::ProjectConfig do
it 'returns nil' do
expect(config.source).to be_nil
expect(config.content).to be_nil
+ expect(config.url).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb b/spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb
new file mode 100644
index 00000000000..8f1b300ae98
--- /dev/null
+++ b/spec/lib/gitlab/ci/secure_files/migration_helper_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::SecureFiles::MigrationHelper, feature_category: :mobile_devops do
+ before do
+ stub_ci_secure_file_object_storage
+ end
+
+ describe '.migrate_to_remote_storage' do
+ let!(:local_file) { create(:ci_secure_file) }
+
+ subject { described_class.migrate_to_remote_storage }
+
+ it 'migrates remote files to remote storage' do
+ subject
+
+ expect(local_file.reload.file_store).to eq(Ci::SecureFileUploader::Store::REMOTE)
+ end
+ end
+
+ describe '.migrate_in_batches' do
+ let!(:local_file) { create(:ci_secure_file) }
+ let!(:storage) { Ci::SecureFileUploader::Store::REMOTE }
+
+ subject { described_class.migrate_to_remote_storage }
+
+ it 'migrates the given file to the given storage backend' do
+ expect_next_found_instance_of(Ci::SecureFile) do |instance|
+ expect(instance).to receive_message_chain(:file, :migrate!).with(storage)
+ end
+
+ described_class.send(:migrate_in_batches, Ci::SecureFile.all, storage)
+ end
+
+ it 'calls the given block for each migrated file' do
+ expect_next_found_instance_of(Ci::SecureFile) do |instance|
+ expect(instance).to receive(:metadata)
+ end
+
+ described_class.send(:migrate_in_batches, Ci::SecureFile.all, storage, &:metadata)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/status/build/factory_spec.rb b/spec/lib/gitlab/ci/status/build/factory_spec.rb
index 21eca97331e..f71f3d47452 100644
--- a/spec/lib/gitlab/ci/status/build/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/factory_spec.rb
@@ -370,7 +370,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Factory do
end
it 'fabricates status with correct details' do
- expect(status.text).to eq s_('CiStatusText|delayed')
+ expect(status.text).to eq s_('CiStatusText|scheduled')
expect(status.group).to eq 'scheduled'
expect(status.icon).to eq 'status_scheduled'
expect(status.favicon).to eq 'favicon_status_scheduled'
diff --git a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
deleted file mode 100644
index b79b78d911b..00000000000
--- a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Ci::Status::Build::WaitingForApproval do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let_it_be(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
-
- subject { described_class.new(Gitlab::Ci::Status::Core.new(build, user)) }
-
- describe '.matches?' do
- subject { described_class.matches?(build, user) }
-
- let(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
-
- before do
- create(:deployment, deployment_status, deployable: build, project: project)
- end
-
- context 'when build is waiting for approval' do
- let(:deployment_status) { :blocked }
-
- it 'is a correct match' do
- expect(subject).to be_truthy
- end
- end
-
- context 'when build is not waiting for approval' do
- let(:deployment_status) { :created }
-
- it 'does not match' do
- expect(subject).to be_falsey
- end
- end
- end
-
- describe '#illustration' do
- before do
- environment = create(:environment, name: 'production', project: project)
- create(:deployment, :blocked, project: project, environment: environment, deployable: build)
- end
-
- it { expect(subject.illustration).to include(:image, :size) }
- it { expect(subject.illustration[:title]).to eq('Waiting for approval') }
- it { expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"') }
- end
-
- describe '#has_action?' do
- it { expect(subject.has_action?).to be_truthy }
- end
-
- describe '#action_icon' do
- it { expect(subject.action_icon).to be_nil }
- end
-
- describe '#action_title' do
- it { expect(subject.action_title).to be_nil }
- end
-
- describe '#action_button_title' do
- it { expect(subject.action_button_title).to eq('Go to environments page to approve or reject') }
- end
-
- describe '#action_path' do
- it { expect(subject.action_path).to include('environments') }
- end
-
- describe '#action_method' do
- it { expect(subject.action_method).to eq(:get) }
- end
-end
diff --git a/spec/lib/gitlab/ci/status/scheduled_spec.rb b/spec/lib/gitlab/ci/status/scheduled_spec.rb
index 8a923faf3f9..df72455d3c1 100644
--- a/spec/lib/gitlab/ci/status/scheduled_spec.rb
+++ b/spec/lib/gitlab/ci/status/scheduled_spec.rb
@@ -2,17 +2,17 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Status::Scheduled do
+RSpec.describe Gitlab::Ci::Status::Scheduled, feature_category: :continuous_integration do
subject do
described_class.new(double('subject'), double('user'))
end
describe '#text' do
- it { expect(subject.text).to eq 'delayed' }
+ it { expect(subject.text).to eq 'scheduled' }
end
describe '#label' do
- it { expect(subject.label).to eq 'delayed' }
+ it { expect(subject.label).to eq 'scheduled' }
end
describe '#icon' do
diff --git a/spec/lib/gitlab/ci/status/success_warning_spec.rb b/spec/lib/gitlab/ci/status/success_warning_spec.rb
index 86b826ad272..1725f90a0cf 100644
--- a/spec/lib/gitlab/ci/status/success_warning_spec.rb
+++ b/spec/lib/gitlab/ci/status/success_warning_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Status::SuccessWarning do
+RSpec.describe Gitlab::Ci::Status::SuccessWarning, feature_category: :continuous_integration do
let(:status) { double('status') }
subject do
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Ci::Status::SuccessWarning do
end
describe '#test' do
- it { expect(subject.text).to eq 'passed' }
+ it { expect(subject.text).to eq 'warning' }
end
describe '#label' do
diff --git a/spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..4f80ae0054b
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/Pages/zola_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Pages/Zola.gitlab-ci.yml', feature_category: :pages do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Pages/Zola') }
+
+ describe 'the created pipeline' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.first_owner }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: project.default_branch) }
+ let(:pipeline) { service.execute(:push).payload }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ end
+
+ it 'creates "pages" job' do
+ expect(build_names).to include('pages')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index 0a079a69682..e5324560944 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -33,38 +33,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
])
end
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it 'includes all predefined variables in a valid order' do
- keys = subject.pluck(:key)
-
- expect(keys).to contain_exactly(*%w[
- CI_PIPELINE_IID
- CI_PIPELINE_SOURCE
- CI_PIPELINE_CREATED_AT
- CI_COMMIT_SHA
- CI_COMMIT_SHORT_SHA
- CI_COMMIT_BEFORE_SHA
- CI_COMMIT_REF_NAME
- CI_COMMIT_REF_SLUG
- CI_COMMIT_BRANCH
- CI_COMMIT_MESSAGE
- CI_COMMIT_TITLE
- CI_COMMIT_DESCRIPTION
- CI_COMMIT_REF_PROTECTED
- CI_COMMIT_TIMESTAMP
- CI_COMMIT_AUTHOR
- CI_BUILD_REF
- CI_BUILD_BEFORE_SHA
- CI_BUILD_REF_NAME
- CI_BUILD_REF_SLUG
- ])
- end
- end
-
context 'when the pipeline is running for a tag' do
let(:pipeline) { build(:ci_empty_pipeline, :created, project: project, ref: 'test', tag: true) }
@@ -90,40 +58,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
CI_COMMIT_TAG_MESSAGE
])
end
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it 'includes all predefined variables in a valid order' do
- keys = subject.pluck(:key)
-
- expect(keys).to contain_exactly(*%w[
- CI_PIPELINE_IID
- CI_PIPELINE_SOURCE
- CI_PIPELINE_CREATED_AT
- CI_COMMIT_SHA
- CI_COMMIT_SHORT_SHA
- CI_COMMIT_BEFORE_SHA
- CI_COMMIT_REF_NAME
- CI_COMMIT_REF_SLUG
- CI_COMMIT_MESSAGE
- CI_COMMIT_TITLE
- CI_COMMIT_DESCRIPTION
- CI_COMMIT_REF_PROTECTED
- CI_COMMIT_TIMESTAMP
- CI_COMMIT_AUTHOR
- CI_BUILD_REF
- CI_BUILD_BEFORE_SHA
- CI_BUILD_REF_NAME
- CI_BUILD_REF_SLUG
- CI_COMMIT_TAG
- CI_COMMIT_TAG_MESSAGE
- CI_BUILD_TAG
- ])
- end
- end
end
context 'when merge request is present' do
@@ -365,21 +299,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
'CI_COMMIT_TAG_MESSAGE'
)
end
-
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- it 'does not expose tag variables' do
- expect(subject.to_hash.keys)
- .not_to include(
- 'CI_COMMIT_TAG',
- 'CI_COMMIT_TAG_MESSAGE',
- 'CI_BUILD_TAG'
- )
- end
- end
end
context 'without a commit' do
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 10974993fa4..6b296924b6d 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -154,151 +154,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
it { expect(subject.to_runner_variables).to eq(predefined_variables) }
- context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do
- before do
- stub_feature_flags(ci_remove_legacy_predefined_variables: false)
- end
-
- let(:predefined_variables) do
- [
- { key: 'CI_JOB_NAME',
- value: 'rspec:test 1' },
- { key: 'CI_JOB_NAME_SLUG',
- value: 'rspec-test-1' },
- { key: 'CI_JOB_STAGE',
- value: job.stage_name },
- { key: 'CI_NODE_TOTAL',
- value: '1' },
- { key: 'CI_ENVIRONMENT_NAME',
- value: 'test' },
- { key: 'CI_BUILD_NAME',
- value: 'rspec:test 1' },
- { key: 'CI_BUILD_STAGE',
- value: job.stage_name },
- { key: 'CI',
- value: 'true' },
- { key: 'GITLAB_CI',
- value: 'true' },
- { key: 'CI_SERVER_URL',
- value: Gitlab.config.gitlab.url },
- { key: 'CI_SERVER_HOST',
- value: Gitlab.config.gitlab.host },
- { key: 'CI_SERVER_PORT',
- value: Gitlab.config.gitlab.port.to_s },
- { key: 'CI_SERVER_PROTOCOL',
- value: Gitlab.config.gitlab.protocol },
- { key: 'CI_SERVER_SHELL_SSH_HOST',
- value: Gitlab.config.gitlab_shell.ssh_host.to_s },
- { key: 'CI_SERVER_SHELL_SSH_PORT',
- value: Gitlab.config.gitlab_shell.ssh_port.to_s },
- { key: 'CI_SERVER_NAME',
- value: 'GitLab' },
- { key: 'CI_SERVER_VERSION',
- value: Gitlab::VERSION },
- { key: 'CI_SERVER_VERSION_MAJOR',
- value: Gitlab.version_info.major.to_s },
- { key: 'CI_SERVER_VERSION_MINOR',
- value: Gitlab.version_info.minor.to_s },
- { key: 'CI_SERVER_VERSION_PATCH',
- value: Gitlab.version_info.patch.to_s },
- { key: 'CI_SERVER_REVISION',
- value: Gitlab.revision },
- { key: 'GITLAB_FEATURES',
- value: project.licensed_features.join(',') },
- { key: 'CI_PROJECT_ID',
- value: project.id.to_s },
- { key: 'CI_PROJECT_NAME',
- value: project.path },
- { key: 'CI_PROJECT_TITLE',
- value: project.title },
- { key: 'CI_PROJECT_DESCRIPTION',
- value: project.description },
- { key: 'CI_PROJECT_PATH',
- value: project.full_path },
- { key: 'CI_PROJECT_PATH_SLUG',
- value: project.full_path_slug },
- { key: 'CI_PROJECT_NAMESPACE',
- value: project.namespace.full_path },
- { key: 'CI_PROJECT_NAMESPACE_ID',
- value: project.namespace.id.to_s },
- { key: 'CI_PROJECT_ROOT_NAMESPACE',
- value: project.namespace.root_ancestor.path },
- { key: 'CI_PROJECT_URL',
- value: project.web_url },
- { key: 'CI_PROJECT_VISIBILITY',
- value: "private" },
- { key: 'CI_PROJECT_REPOSITORY_LANGUAGES',
- value: project.repository_languages.map(&:name).join(',').downcase },
- { key: 'CI_PROJECT_CLASSIFICATION_LABEL',
- value: project.external_authorization_classification_label },
- { key: 'CI_DEFAULT_BRANCH',
- value: project.default_branch },
- { key: 'CI_CONFIG_PATH',
- value: project.ci_config_path_or_default },
- { key: 'CI_PAGES_DOMAIN',
- value: Gitlab.config.pages.host },
- { key: 'CI_PAGES_URL',
- value: project.pages_url },
- { key: 'CI_API_V4_URL',
- value: API::Helpers::Version.new('v4').root_url },
- { key: 'CI_API_GRAPHQL_URL',
- value: Gitlab::Routing.url_helpers.api_graphql_url },
- { key: 'CI_TEMPLATE_REGISTRY_HOST',
- value: template_registry_host },
- { key: 'CI_PIPELINE_IID',
- value: pipeline.iid.to_s },
- { key: 'CI_PIPELINE_SOURCE',
- value: pipeline.source },
- { key: 'CI_PIPELINE_CREATED_AT',
- value: pipeline.created_at.iso8601 },
- { key: 'CI_COMMIT_SHA',
- value: job.sha },
- { key: 'CI_COMMIT_SHORT_SHA',
- value: job.short_sha },
- { key: 'CI_COMMIT_BEFORE_SHA',
- value: job.before_sha },
- { key: 'CI_COMMIT_REF_NAME',
- value: job.ref },
- { key: 'CI_COMMIT_REF_SLUG',
- value: job.ref_slug },
- { key: 'CI_COMMIT_BRANCH',
- value: job.ref },
- { key: 'CI_COMMIT_MESSAGE',
- value: pipeline.git_commit_message },
- { key: 'CI_COMMIT_TITLE',
- value: pipeline.git_commit_title },
- { key: 'CI_COMMIT_DESCRIPTION',
- value: pipeline.git_commit_description },
- { key: 'CI_COMMIT_REF_PROTECTED',
- value: (!!pipeline.protected_ref?).to_s },
- { key: 'CI_COMMIT_TIMESTAMP',
- value: pipeline.git_commit_timestamp },
- { key: 'CI_COMMIT_AUTHOR',
- value: pipeline.git_author_full_text },
- { key: 'CI_BUILD_REF',
- value: job.sha },
- { key: 'CI_BUILD_BEFORE_SHA',
- value: job.before_sha },
- { key: 'CI_BUILD_REF_NAME',
- value: job.ref },
- { key: 'CI_BUILD_REF_SLUG',
- value: job.ref_slug },
- { key: 'YAML_VARIABLE',
- value: 'value' },
- { key: 'GITLAB_USER_ID',
- value: user.id.to_s },
- { key: 'GITLAB_USER_EMAIL',
- value: user.email },
- { key: 'GITLAB_USER_LOGIN',
- value: user.username },
- { key: 'GITLAB_USER_NAME',
- value: user.name }
- ].map { |var| var.merge(public: true, masked: false) }
- end
-
- it { expect(subject.to_runner_variables).to eq(predefined_variables) }
- end
-
context 'variables ordering' do
def var(name, value)
{ key: name, value: value.to_s, public: true, masked: false }
diff --git a/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb b/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
deleted file mode 100644
index cb13a711857..00000000000
--- a/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require 'puma_worker_killer'
-
-RSpec.describe Gitlab::Cluster::PumaWorkerKillerInitializer do
- describe '.start' do
- context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is false' do
- before do
- stub_env('GITLAB_MEMORY_WATCHDOG_ENABLED', 'false')
- end
-
- it 'configures and start PumaWorkerKiller' do
- expect(PumaWorkerKiller).to receive(:config)
- expect(PumaWorkerKiller).to receive(:start)
-
- described_class.start({})
- end
- end
-
- context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is not set' do
- it 'configures and start PumaWorkerKiller' do
- expect(PumaWorkerKiller).not_to receive(:config)
- expect(PumaWorkerKiller).not_to receive(:start)
-
- described_class.start({})
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb b/spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb
deleted file mode 100644
index cf532cf7be6..00000000000
--- a/spec/lib/gitlab/cluster/puma_worker_killer_observer_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Cluster::PumaWorkerKillerObserver do
- let(:counter) { Gitlab::Metrics::NullMetric.instance }
-
- before do
- allow(Gitlab::Metrics).to receive(:counter)
- .with(any_args)
- .and_return(counter)
- end
-
- describe '#callback' do
- subject { described_class.new }
-
- it 'increments timeout counter' do
- worker = double(index: 0)
-
- expect(counter).to receive(:increment)
-
- subject.callback.call(worker)
- end
- end
-end
diff --git a/spec/lib/gitlab/container_repository/tags/cache_spec.rb b/spec/lib/gitlab/container_repository/tags/cache_spec.rb
index fcfc8e7a348..4b8c843eb3a 100644
--- a/spec/lib/gitlab/container_repository/tags/cache_spec.rb
+++ b/spec/lib/gitlab/container_repository/tags/cache_spec.rb
@@ -81,7 +81,9 @@ RSpec.describe ::Gitlab::ContainerRepository::Tags::Cache, :clean_gitlab_redis_c
::Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
+
+ expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
expect(pipeline)
.to receive(:set)
.with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i)
diff --git a/spec/lib/gitlab/counters/buffered_counter_spec.rb b/spec/lib/gitlab/counters/buffered_counter_spec.rb
index 2d5209161d9..4fd152eb805 100644
--- a/spec/lib/gitlab/counters/buffered_counter_spec.rb
+++ b/spec/lib/gitlab/counters/buffered_counter_spec.rb
@@ -244,43 +244,6 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(project_statistics_bulk_increment: false)
- end
-
- context 'when the counter is not undergoing refresh' do
- it 'sets a new key by the given value' do
- counter.increment(increment)
-
- expect(counter.get).to eq(increment.amount)
- end
-
- it 'increments an existing key by the given value' do
- counter.increment(other_increment)
- counter.increment(increment)
-
- expect(counter.get).to eq(other_increment.amount + increment.amount)
- end
- end
-
- context 'when the counter is undergoing refresh' do
- before do
- counter.initiate_refresh!
- end
-
- context 'when it is a decrement (negative amount)' do
- let(:decrement) { Gitlab::Counters::Increment.new(amount: -123, ref: 3) }
-
- it 'immediately decrements the counter key to negative' do
- counter.increment(decrement)
-
- expect(counter.get).to eq(decrement.amount)
- end
- end
- end
- end
end
describe '#bulk_increment' do
@@ -416,44 +379,6 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(project_statistics_bulk_increment: false)
- end
-
- context 'when the counter is not undergoing refresh' do
- it 'sets a new key by the given value' do
- counter.bulk_increment(increments)
-
- expect(counter.get).to eq(increments.sum(&:amount))
- end
-
- it 'increments an existing key by the given value' do
- counter.increment(other_increment)
-
- result = counter.bulk_increment(increments)
-
- expect(result).to eq(other_increment.amount + increments.sum(&:amount))
- end
- end
-
- context 'when the counter is undergoing refresh' do
- before do
- counter.initiate_refresh!
- end
-
- context 'when it is a decrement (negative amount)' do
- let(:decrement) { Gitlab::Counters::Increment.new(amount: -123, ref: 3) }
-
- it 'immediately decrements the counter key to negative' do
- counter.bulk_increment([decrement])
-
- expect(counter.get).to eq(decrement.amount)
- end
- end
- end
- end
end
describe '#initiate_refresh!' do
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index eb348f5b497..351872ffbc5 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::DataBuilder::Pipeline do
+RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_integration do
let_it_be(:user) { create(:user, :public_email) }
let_it_be(:project) { create(:project, :repository) }
@@ -26,6 +26,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
it 'has correct attributes', :aggregate_failures do
expect(attributes).to be_a(Hash)
+ expect(attributes[:name]).to be_nil
expect(attributes[:ref]).to eq(pipeline.ref)
expect(attributes[:sha]).to eq(pipeline.sha)
expect(attributes[:tag]).to eq(pipeline.tag)
@@ -33,6 +34,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(attributes[:iid]).to eq(pipeline.iid)
expect(attributes[:source]).to eq(pipeline.source)
expect(attributes[:status]).to eq(pipeline.status)
+ expect(attributes[:url]).to eq(Gitlab::Routing.url_helpers.project_pipeline_url(pipeline.project, pipeline))
expect(attributes[:detailed_status]).to eq('passed')
expect(build_data).to be_a(Hash)
expect(build_data[:id]).to eq(build.id)
@@ -53,6 +55,16 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(data[:source_pipeline]).to be_nil
end
+ context 'pipeline with metadata' do
+ let_it_be_with_reload(:pipeline_metadata) do
+ create(:ci_pipeline_metadata, pipeline: pipeline, name: "My Pipeline")
+ end
+
+ it 'has pipeline name', :aggregate_failures do
+ expect(attributes[:name]).to eq("My Pipeline")
+ end
+ end
+
context 'build with runner' do
let_it_be(:tag_names) { %w(tag-1 tag-2) }
let_it_be(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n) }) }
diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
index 51a09ba0b5e..0454e7e72f4 100644
--- a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
@@ -12,11 +12,12 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
- let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection_name) { Gitlab::Database::PRIMARY_DATABASE_NAME }
+ let(:model) { Gitlab::Database.database_base_models[connection_name] }
let(:connection) { model.connection }
let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_key) { "gitlab/database/asyncddl/actions/#{connection_name}" }
let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
around do |example|
@@ -51,7 +52,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: expected_message))
+ .with(a_hash_including(message: expected_message, connection_name: connection_name.to_s))
end
end
@@ -85,11 +86,11 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Starting async index creation'))
+ .with(a_hash_including(message: 'Starting async index creation', connection_name: connection_name.to_s))
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Finished async index creation'))
+ .with(a_hash_including(message: 'Finished async index creation', connection_name: connection_name.to_s))
end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
index 7f0febdcacd..384c541256c 100644
--- a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
@@ -12,11 +12,12 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
- let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection_name) { Gitlab::Database::PRIMARY_DATABASE_NAME }
+ let(:model) { Gitlab::Database.database_base_models[connection_name] }
let(:connection) { model.connection }
let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_key) { "gitlab/database/asyncddl/actions/#{connection_name}" }
let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
before do
@@ -55,7 +56,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: expected_message))
+ .with(a_hash_including(message: expected_message, connection_name: connection_name.to_s))
end
end
@@ -91,11 +92,11 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Starting async index removal'))
+ .with(a_hash_including(message: 'Starting async index removal', connection_name: connection_name.to_s))
expect(Gitlab::AppLogger)
.to have_received(:info)
- .with(a_hash_including(message: 'Finished async index removal'))
+ .with(a_hash_including(message: 'Finished async index removal', connection_name: connection_name.to_s))
end
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
index 5e9d4f78a4a..9e37124ba28 100644
--- a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
@@ -6,6 +6,9 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model,
it { is_expected.to be_a Gitlab::Database::SharedModel }
describe 'validations' do
+ subject(:model) { build(:postgres_async_index) }
+
+ let(:table_name_limit) { described_class::MAX_TABLE_NAME_LENGTH }
let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH }
let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH }
let(:last_error_limit) { described_class::MAX_LAST_ERROR_LENGTH }
@@ -13,10 +16,45 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model,
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) }
it { is_expected.to validate_presence_of(:table_name) }
- it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) }
+ it { is_expected.to validate_length_of(:table_name).is_at_most(table_name_limit) }
it { is_expected.to validate_presence_of(:definition) }
it { is_expected.to validate_length_of(:definition).is_at_most(definition_limit) }
it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) }
+
+ shared_examples 'table_name is invalid' do
+ before do
+ model.table_name = table_name
+ end
+
+ it 'is invalid' do
+ expect(model).to be_invalid
+ expect(model.errors).to have_key(:table_name)
+ end
+ end
+
+ context 'when passing a long schema name' do
+ let(:table_name) { "#{'schema_name' * 10}.table_name" }
+
+ it_behaves_like 'table_name is invalid'
+ end
+
+ context 'when passing a long table name' do
+ let(:table_name) { "schema_name.#{'table_name' * 10}" }
+
+ it_behaves_like 'table_name is invalid'
+ end
+
+ context 'when passing a long table name and schema name' do
+ let(:table_name) { "#{'schema_name' * 10}.#{'table_name' * 10}" }
+
+ it_behaves_like 'table_name is invalid'
+ end
+
+ context 'when invalid table name is given' do
+ let(:table_name) { 'a.b.c' }
+
+ it_behaves_like 'table_name is invalid'
+ end
end
describe 'scopes' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index 4ef2e7f936b..0faa468233d 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner, feature_category: :database do
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
let(:migration_wrapper) { double('test wrapper') }
@@ -15,8 +15,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
before do
- normal_signal = instance_double(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal, stop?: false)
- allow(Gitlab::Database::BackgroundMigration::HealthStatus).to receive(:evaluate).and_return([normal_signal])
+ normal_signal = instance_double(Gitlab::Database::HealthStatus::Signals::Normal, stop?: false)
+ allow(Gitlab::Database::HealthStatus).to receive(:evaluate).and_return([normal_signal])
end
describe '#run_migration_job' do
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
context 'migration health' do
- let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus }
+ let(:health_status) { Gitlab::Database::HealthStatus }
let(:stop_signal) { health_status::Signals::Stop.new(:indicator, reason: 'Take a break') }
let(:normal_signal) { health_status::Signals::Normal.new(:indicator, reason: 'All good') }
let(:not_available_signal) { health_status::Signals::NotAvailable.new(:indicator, reason: 'Indicator is disabled') }
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 546f9353808..213dee0d19d 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -46,6 +46,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(batched_migration.status_name).to be :finished
end
+
+ it 'updates the finished_at' do
+ freeze_time do
+ expect { batched_migration.finish! }.to change(batched_migration, :finished_at).from(nil).to(Time.current)
+ end
+ end
end
end
@@ -173,52 +179,6 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
- describe '.active_migration' do
- let(:connection) { Gitlab::Database.database_base_models[:main].connection }
- let!(:migration1) { create(:batched_background_migration, :finished) }
-
- subject(:active_migration) { described_class.active_migration(connection: connection) }
-
- around do |example|
- Gitlab::Database::SharedModel.using_connection(connection) do
- example.run
- end
- end
-
- context 'when there are no migrations on hold' do
- let!(:migration2) { create(:batched_background_migration, :active) }
- let!(:migration3) { create(:batched_background_migration, :active) }
-
- it 'returns the first active migration according to queue order' do
- expect(active_migration).to eq(migration2)
- end
- end
-
- context 'when there are migrations on hold' do
- let!(:migration2) { create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now) }
- let!(:migration3) { create(:batched_background_migration, :active, on_hold_until: 2.minutes.ago) }
-
- it 'returns the first active migration that is not on hold according to queue order' do
- expect(active_migration).to eq(migration3)
- end
- end
-
- context 'when there are migrations not available for the current connection' do
- let!(:migration2) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_not_existing) }
- let!(:migration3) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_main) }
-
- it 'returns the first active migration that is available for the current connection' do
- expect(active_migration).to eq(migration3)
- end
- end
-
- context 'when there are no active migrations available' do
- it 'returns nil' do
- expect(active_migration).to eq(nil)
- end
- end
- end
-
describe '.find_executable' do
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
let(:migration_id) { migration.id }
diff --git a/spec/lib/gitlab/database/background_migration/health_status_spec.rb b/spec/lib/gitlab/database/background_migration/health_status_spec.rb
deleted file mode 100644
index 4d6c729f080..00000000000
--- a/spec/lib/gitlab/database/background_migration/health_status_spec.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus, feature_category: :database do
- let(:connection) { Gitlab::Database.database_base_models[:main].connection }
-
- around do |example|
- Gitlab::Database::SharedModel.using_connection(connection) do
- example.run
- end
- end
-
- describe '.evaluate' do
- subject(:evaluate) { described_class.evaluate(migration, [autovacuum_indicator_class]) }
-
- let(:migration) { build(:batched_background_migration, :active) }
-
- let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus }
- let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable }
- let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog }
- let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex }
- let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) }
- let(:wal_indicator) { instance_double(wal_indicator_class) }
- let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) }
-
- before do
- allow(autovacuum_indicator_class).to receive(:new).with(migration.health_context).and_return(autovacuum_indicator)
- end
-
- context 'with default indicators' do
- subject(:evaluate) { described_class.evaluate(migration) }
-
- it 'returns a collection of signals' do
- normal_signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
- not_available_signal = instance_double("#{health_status}::Signals::NotAvailable", log_info?: false)
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal)
- expect(wal_indicator_class).to receive(:new).with(migration.health_context).and_return(wal_indicator)
- expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal)
- expect(patroni_apdex_indicator_class).to receive(:new).with(migration.health_context)
- .and_return(patroni_apdex_indicator)
- expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal)
-
- expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal)
- end
- end
-
- it 'returns a collection of signals' do
- signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
-
- expect(evaluate).to contain_exactly(signal)
- end
-
- it 'logs interesting signals' do
- signal = instance_double(
- "#{health_status}::Signals::Stop",
- log_info?: true,
- indicator_class: autovacuum_indicator_class,
- short_name: 'Stop',
- reason: 'Test Exception'
- )
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
-
- expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with(
- migration_id: migration.id,
- health_status_indicator: autovacuum_indicator_class.to_s,
- indicator_signal: 'Stop',
- signal_reason: 'Test Exception',
- message: "#{migration} signaled: #{signal}"
- )
-
- evaluate
- end
-
- it 'does not log signals of no interest' do
- signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
-
- expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
- expect(described_class).not_to receive(:log_signal)
-
- evaluate
- end
-
- context 'on indicator error' do
- let(:error) { RuntimeError.new('everything broken') }
-
- before do
- expect(autovacuum_indicator).to receive(:evaluate).and_raise(error)
- end
-
- it 'does not fail' do
- expect { evaluate }.not_to raise_error
- end
-
- it 'returns Unknown signal' do
- signal = evaluate.first
-
- expect(signal).to be_an_instance_of(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
- expect(signal.reason).to eq("unexpected error: everything broken (RuntimeError)")
- end
-
- it 'reports the exception to error tracking' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(error, migration_id: migration.id, job_class_name: migration.job_class_name)
-
- evaluate
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb b/spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb
new file mode 100644
index 00000000000..32766b0d937
--- /dev/null
+++ b/spec/lib/gitlab/database/convert_feature_category_to_group_label_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::ConvertFeatureCategoryToGroupLabel, feature_category: :database do
+ describe '#execute' do
+ subject(:group_label) { described_class.new(feature_category).execute }
+
+ let_it_be(:stages_fixture) do
+ { stages: { manage: { groups: { database: { categories: ['database'] } } } } }
+ end
+
+ before do
+ stub_request(:get, 'https://gitlab.com/gitlab-com/www-gitlab-com/-/raw/master/data/stages.yml')
+ .to_return(status: 200, body: stages_fixture.to_json, headers: {})
+ end
+
+ context 'when the group label exists' do
+ let(:feature_category) { 'database' }
+
+ it 'returns a group label' do
+ expect(group_label).to eql 'group::database'
+ end
+ end
+
+ context 'when the group label does not exist' do
+ let(:feature_category) { 'non_existing_feature_category_test' }
+
+ it 'returns nil' do
+ expect(group_label).to be nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/database_connection_info_spec.rb b/spec/lib/gitlab/database/database_connection_info_spec.rb
new file mode 100644
index 00000000000..c87fd61268d
--- /dev/null
+++ b/spec/lib/gitlab/database/database_connection_info_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::DatabaseConnectionInfo, feature_category: :cell do
+ let(:default_attributes) do
+ {
+ name: 'main',
+ gitlab_schemas: ['gitlab_main'],
+ klass: 'ActiveRecord::Base'
+ }
+ end
+
+ let(:attributes) { default_attributes }
+
+ subject { described_class.new(attributes) }
+
+ describe '.new' do
+ let(:attributes) { default_attributes.merge(fallback_database: 'fallback') }
+
+ it 'does convert attributes into symbols and objects' do
+ expect(subject.name).to be_a(Symbol)
+ expect(subject.gitlab_schemas).to all(be_a(Symbol))
+ expect(subject.klass).to be(ActiveRecord::Base)
+ expect(subject.fallback_database).to be_a(Symbol)
+ expect(subject.db_dir).to be_a(Pathname)
+ end
+
+ it 'does raise error when using invalid argument' do
+ expect { described_class.new(invalid: 'aa') }.to raise_error ArgumentError, /unknown keywords: invalid/
+ end
+ end
+
+ describe '.load_file' do
+ it 'does load YAML file and has file_path specified' do
+ file_path = Rails.root.join('db/database_connections/main.yaml')
+ db_info = described_class.load_file(file_path)
+
+ expect(db_info).not_to be_nil
+ expect(db_info.file_path).to eq(file_path)
+ end
+ end
+
+ describe '#connection_class' do
+ context 'when klass is "ActiveRecord::Base"' do
+ let(:attributes) { default_attributes.merge(klass: 'ActiveRecord::Base') }
+
+ it 'does always return "ActiveRecord::Base"' do
+ expect(subject.connection_class).to eq(ActiveRecord::Base)
+ end
+ end
+
+ context 'when klass is "Ci::ApplicationRecord"' do
+ let(:attributes) { default_attributes.merge(klass: 'Ci::ApplicationRecord') }
+
+ it 'does return "Ci::ApplicationRecord" when it is connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(true)
+
+ expect(subject.connection_class).to eq(Ci::ApplicationRecord)
+ end
+
+ it 'does return nil when it is not connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(false)
+
+ expect(subject.connection_class).to eq(nil)
+ end
+ end
+ end
+
+ describe '#order' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:configs_for) { %w[main ci geo] }
+
+ before do
+ hash_configs = configs_for.map do |x|
+ instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, name: x)
+ end
+ allow(::ActiveRecord::Base).to receive(:configurations).and_return(
+ instance_double(ActiveRecord::DatabaseConfigurations, configs_for: hash_configs)
+ )
+ end
+
+ where(:name, :order) do
+ :main | 0
+ :ci | 1
+ :undefined | 1000
+ end
+
+ with_them do
+ let(:attributes) { default_attributes.merge(name: name) }
+
+ it { expect(subject.order).to eq(order) }
+ end
+ end
+
+ describe '#connection_class_or_fallback' do
+ let(:all_databases) do
+ {
+ main: described_class.new(
+ name: 'main', gitlab_schemas: [], klass: 'ActiveRecord::Base'),
+ ci: described_class.new(
+ name: 'ci', gitlab_schemas: [], klass: 'Ci::ApplicationRecord', fallback_database: 'main')
+ }
+ end
+
+ context 'for "main"' do
+ it 'does return ActiveRecord::Base' do
+ expect(all_databases[:main].connection_class_or_fallback(all_databases))
+ .to eq(ActiveRecord::Base)
+ end
+ end
+
+ context 'for "ci"' do
+ it 'does return "Ci::ApplicationRecord" when it is connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(true)
+
+ expect(all_databases[:ci].connection_class_or_fallback(all_databases))
+ .to eq(Ci::ApplicationRecord)
+ end
+
+ it 'does return "ActiveRecord::Base" (fallback to "main") when it is not connection_class' do
+ expect(Ci::ApplicationRecord).to receive(:connection_class).and_return(false)
+
+ expect(all_databases[:ci].connection_class_or_fallback(all_databases))
+ .to eq(ActiveRecord::Base)
+ end
+ end
+ end
+
+ describe '#has_gitlab_shared?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:gitlab_schemas, :result) do
+ %w[gitlab_main] | false
+ %w[gitlab_main gitlab_shared] | true
+ end
+
+ with_them do
+ let(:attributes) { default_attributes.merge(gitlab_schemas: gitlab_schemas) }
+
+ it { expect(subject.has_gitlab_shared?).to eq(result) }
+ end
+ end
+
+ describe 'db_docs_dir' do
+ let(:attributes) { default_attributes.merge(db_dir: db_dir) }
+
+ context 'when db_dir is specified' do
+ let(:db_dir) { 'ee/my/db' }
+
+ it { expect(subject.db_docs_dir).to eq(Rails.root.join(db_dir, 'docs')) }
+ end
+
+ context 'when db_dir is not specified fallbacks to "db/docs"' do
+ let(:db_dir) { nil }
+
+ it { expect(subject.db_docs_dir).to eq(Rails.root.join('db/docs')) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index 75b543bee85..2653297c81a 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -70,11 +70,13 @@ RSpec.describe Gitlab::Database::EachDatabase do
# Clear the memoization because the return of Gitlab::Database#schemas_to_base_models depends on the stubbed value
clear_memoization(:@schemas_to_base_models)
- clear_memoization(:@schemas_to_base_models_ee)
end
it 'only yields the unshared connections' do
- expect(Gitlab::Database).to receive(:db_config_share_with).exactly(3).times.and_return(nil, 'main', 'main')
+      # if this is a `non-main` connection, make it shared with `main`
+ allow(Gitlab::Database).to receive(:db_config_share_with) do |db_config|
+ db_config.name != 'main' ? 'main' : nil
+ end
expect { |b| described_class.each_database_connection(include_shared: false, &b) }
.to yield_successive_args([ActiveRecord::Base.connection, 'main'])
diff --git a/spec/lib/gitlab/database/gitlab_schema_info_spec.rb b/spec/lib/gitlab/database/gitlab_schema_info_spec.rb
new file mode 100644
index 00000000000..b37aec46de8
--- /dev/null
+++ b/spec/lib/gitlab/database/gitlab_schema_info_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::GitlabSchemaInfo, feature_category: :cell do
+ describe '.new' do
+ it 'does ensure that name is always symbol' do
+ schema_info = described_class.new(name: 'gitlab_main')
+ expect(schema_info.name).to eq(:gitlab_main)
+ end
+
+ it 'does raise error when using invalid argument' do
+ expect { described_class.new(invalid: 'aa') }.to raise_error ArgumentError, /unknown keywords: invalid/
+ end
+ end
+
+ describe '.load_file' do
+ it 'does load YAML file and has file_path specified' do
+ file_path = Rails.root.join('db/gitlab_schemas/gitlab_main.yaml')
+ schema_info = described_class.load_file(file_path)
+
+ expect(schema_info).not_to be_nil
+ expect(schema_info.file_path).to eq(file_path)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 5d3260a77c9..48f5cdb995b 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -20,12 +20,6 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
shared_examples 'maps table name to table schema' do
using RSpec::Parameterized::TableSyntax
- before do
- ApplicationRecord.connection.execute(<<~SQL)
- CREATE INDEX index_name_on_table_belonging_to_gitlab_main ON public.projects (name);
- SQL
- end
-
where(:name, :classification) do
'ci_builds' | :gitlab_ci
'my_schema.ci_builds' | :gitlab_ci
@@ -37,7 +31,6 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
'_test_gitlab_ci_table' | :gitlab_ci
'_test_my_table' | :gitlab_shared
'pg_attribute' | :gitlab_internal
- 'index_name_on_table_belonging_to_gitlab_main' | :gitlab_main
end
with_them do
@@ -52,53 +45,72 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.views_and_tables_to_schema' do
include_examples 'validate schema data', described_class.views_and_tables_to_schema
- # This being run across different databases indirectly also tests
- # a general consistency of structure across databases
- Gitlab::Database.database_base_models.except(:geo).each do |db_config_name, db_class|
- context "for #{db_config_name} using #{db_class}" do
- let(:db_data_sources) { db_class.connection.data_sources }
+ # group configurations by db_docs_dir, since then we expect all sharing this
+ # to contain exactly those tables
+ Gitlab::Database.all_database_connections.values.group_by(&:db_docs_dir).each do |db_docs_dir, db_infos|
+ context "for #{db_docs_dir}" do
+ let(:all_gitlab_schemas) { db_infos.flat_map(&:gitlab_schemas).to_set }
- # The embedding and Geo databases do not share the same structure as all decomposed databases
- subject do
- described_class.views_and_tables_to_schema.reject { |_, v| v == :gitlab_embedding || v == :gitlab_geo }
+ let(:tables_for_gitlab_schemas) do
+ described_class.views_and_tables_to_schema.select do |_, gitlab_schema|
+ all_gitlab_schemas.include?(gitlab_schema)
+ end
end
- it 'new data sources are added' do
- missing_data_sources = db_data_sources.to_set - subject.keys
-
- expect(missing_data_sources).to be_empty, \
- "Missing table/view(s) #{missing_data_sources.to_a} not found in " \
- "#{described_class}.views_and_tables_to_schema. " \
- "Any new tables or views must be added to the database dictionary. " \
- "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
- end
-
- it 'non-existing data sources are removed' do
- extra_data_sources = subject.keys.to_set - db_data_sources
-
- expect(extra_data_sources).to be_empty, \
- "Extra table/view(s) #{extra_data_sources.to_a} found in #{described_class}.views_and_tables_to_schema. " \
- "Any removed or renamed tables or views must be removed from the database dictionary. " \
- "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ db_infos.to_h { |db_info| [db_info.name, db_info.connection_class] }
+ .compact.each do |db_config_name, connection_class|
+ context "validates '#{db_config_name}' using '#{connection_class}'" do
+ let(:data_sources) { connection_class.connection.data_sources }
+
+ it 'new data sources are added' do
+ missing_data_sources = data_sources.to_set - tables_for_gitlab_schemas.keys
+
+ expect(missing_data_sources).to be_empty, \
+ "Missing table/view(s) #{missing_data_sources.to_a} not found in " \
+ "#{described_class}.views_and_tables_to_schema. " \
+ "Any new tables or views must be added to the database dictionary. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
+
+ it 'non-existing data sources are removed' do
+ extra_data_sources = tables_for_gitlab_schemas.keys.to_set - data_sources
+
+ expect(extra_data_sources).to be_empty, \
+ "Extra table/view(s) #{extra_data_sources.to_a} found in " \
+ "#{described_class}.views_and_tables_to_schema. " \
+ "Any removed or renamed tables or views must be removed from the database dictionary. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
+ end
end
end
end
- end
- describe '.dictionary_path_globs' do
- include_examples 'validate path globs', described_class.dictionary_path_globs
- end
+ it 'all tables and views are unique' do
+ table_and_view_names = described_class.build_dictionary('')
+ table_and_view_names += described_class.build_dictionary('views')
- describe '.view_path_globs' do
- include_examples 'validate path globs', described_class.view_path_globs
- end
+ # ignore gitlab_internal due to `ar_internal_metadata`, `schema_migrations`
+ table_and_view_names = table_and_view_names
+ .reject { |_, gitlab_schema| gitlab_schema == :gitlab_internal }
- describe '.deleted_tables_path_globs' do
- include_examples 'validate path globs', described_class.deleted_tables_path_globs
+ duplicated_tables = table_and_view_names
+ .group_by(&:first)
+ .select { |_, schemas| schemas.count > 1 }
+ .keys
+
+ expect(duplicated_tables).to be_empty, \
+ "Duplicated table(s) #{duplicated_tables.to_a} found in #{described_class}.views_and_tables_to_schema. " \
+ "Any duplicated table must be removed from db/docs/ or ee/db/docs/. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
end
- describe '.deleted_views_path_globs' do
- include_examples 'validate path globs', described_class.deleted_views_path_globs
+ describe '.dictionary_path_globs' do
+ include_examples 'validate path globs', described_class.dictionary_path_globs('')
+ include_examples 'validate path globs', described_class.dictionary_path_globs('views')
+ include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_views')
+ include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_tables')
end
describe '.tables_to_schema' do
@@ -128,7 +140,7 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
describe '.table_schemas!' do
- let(:tables) { %w[users projects ci_builds] }
+ let(:tables) { %w[projects issues ci_builds] }
subject { described_class.table_schemas!(tables) }
@@ -137,7 +149,7 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
context 'when one of the tables does not have a matching table schema' do
- let(:tables) { %w[users projects unknown ci_builds] }
+ let(:tables) { %w[namespaces projects unknown ci_builds] }
it 'raises error' do
expect { subject }.to raise_error(/Could not find gitlab schema for table unknown/)
@@ -155,6 +167,18 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
it { is_expected.to be_nil }
end
+
+ context 'when an index name is used as the table name' do
+ before do
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE INDEX index_on_projects ON public.projects USING gin (name gin_trgm_ops)
+ SQL
+ end
+
+ let(:name) { 'index_on_projects' }
+
+ it { is_expected.to be_nil }
+ end
end
describe '.table_schema!' do
@@ -175,4 +199,82 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
end
end
+
+ context 'when testing cross schema access' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(Gitlab::Database).to receive(:all_gitlab_schemas).and_return(
+ [
+ Gitlab::Database::GitlabSchemaInfo.new(
+ name: "gitlab_main_clusterwide",
+ allow_cross_joins: %i[gitlab_shared gitlab_main],
+ allow_cross_transactions: %i[gitlab_internal gitlab_shared gitlab_main],
+ allow_cross_foreign_keys: %i[gitlab_main]
+ ),
+ Gitlab::Database::GitlabSchemaInfo.new(
+ name: "gitlab_main",
+ allow_cross_joins: %i[gitlab_shared],
+ allow_cross_transactions: %i[gitlab_internal gitlab_shared],
+ allow_cross_foreign_keys: %i[]
+ ),
+ Gitlab::Database::GitlabSchemaInfo.new(
+ name: "gitlab_ci",
+ allow_cross_joins: %i[gitlab_shared],
+ allow_cross_transactions: %i[gitlab_internal gitlab_shared],
+ allow_cross_foreign_keys: %i[]
+ )
+ ].index_by(&:name)
+ )
+ end
+
+ describe '.cross_joins_allowed?' do
+ where(:schemas, :result) do
+ %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | false
+ %i[gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | true
+ end
+
+ with_them do
+ it { expect(described_class.cross_joins_allowed?(schemas)).to eq(result) }
+ end
+ end
+
+ describe '.cross_transactions_allowed?' do
+ where(:schemas, :result) do
+ %i[] | true
+ %i[gitlab_main_clusterwide gitlab_main] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | true
+ %i[gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | true
+ end
+
+ with_them do
+ it { expect(described_class.cross_transactions_allowed?(schemas)).to eq(result) }
+ end
+ end
+
+ describe '.cross_foreign_key_allowed?' do
+ where(:schemas, :result) do
+ %i[] | false
+ %i[gitlab_main_clusterwide gitlab_main] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | false
+ %i[gitlab_main gitlab_ci] | false
+ %i[gitlab_main_clusterwide gitlab_shared] | false
+ end
+
+ with_them do
+ it { expect(described_class.cross_foreign_key_allowed?(schemas)).to eq(result) }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
index 1c0f5a0c420..cd145bd5c0f 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::AutovacuumActiveOnTable,
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::AutovacuumActiveOnTable,
feature_category: :database do
include Database::DatabaseHelpers
@@ -23,11 +23,18 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:tables) { [table] }
let(:table) { 'users' }
- let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(connection, tables) }
+ let(:context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ tables,
+ :gitlab_main
+ )
+ end
context 'without autovacuum activity' do
it 'returns Normal signal' do
- expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
end
it 'remembers the indicator class' do
@@ -41,7 +48,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
end
it 'returns Stop signal' do
- expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
end
it 'explains why' do
@@ -55,7 +62,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
it 'returns NoSignal signal in case the feature flag is disabled' do
stub_feature_flags(batched_migrations_health_status_autovacuum: false)
- expect(subject).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
end
end
end
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb b/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb
index d3102a105ea..e0e3a0a7c23 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/patroni_apdex_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength
let(:schema) { :main }
let(:connection) { Gitlab::Database.database_base_models[schema].connection }
@@ -19,8 +19,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) }
let(:context) do
- Gitlab::Database::BackgroundMigration::HealthStatus::Context
- .new(connection, ['users'], gitlab_schema)
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ ['users'],
+ gitlab_schema
+ )
end
let(:gitlab_schema) { "gitlab_#{schema}" }
@@ -61,7 +65,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
it 'returns NoSignal signal in case the feature flag is disabled' do
stub_feature_flags(batched_migrations_health_status_patroni_apdex: false)
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
expect(evaluate.reason).to include('indicator disabled')
end
@@ -69,7 +73,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:database_apdex_settings) { nil }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Patroni Apdex Settings not configured')
end
end
@@ -78,7 +82,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:client_ready) { false }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Prometheus client is not ready')
end
end
@@ -87,7 +91,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:"database_apdex_sli_query_#{schema}") { nil }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Apdex SLI query is not configured')
end
end
@@ -96,7 +100,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
let(:"database_apdex_slo_#{schema}") { nil }
it 'returns Unknown signal' do
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Apdex SLO is not configured')
end
end
@@ -105,7 +109,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
expect(prometheus_client).to receive(:query)
.with(send("database_apdex_sli_query_#{schema}"))
.and_return([{ "value" => [1662423310.878, apdex_slo_above_sli[schema]] }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
expect(evaluate.reason).to include('Patroni service apdex is above SLO')
end
@@ -113,7 +117,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
expect(prometheus_client).to receive(:query)
.with(send("database_apdex_sli_query_#{schema}"))
.and_return([{ "value" => [1662423310.878, apdex_slo_below_sli[schema]] }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
expect(evaluate.reason).to include('Patroni service apdex is below SLO')
end
@@ -131,7 +135,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
with_them do
it 'returns Unknown signal' do
expect(prometheus_client).to receive(:query).and_return(result)
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Unknown)
expect(evaluate.reason).to include('Patroni service apdex can not be calculated')
end
end
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb b/spec/lib/gitlab/database/health_status/indicators/write_ahead_log_spec.rb
index 650f11e3cd5..aa2aee4f94a 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/write_ahead_log_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::WriteAheadLog do
+RSpec.describe Gitlab::Database::HealthStatus::Indicators::WriteAheadLog, feature_category: :database do
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
around do |example|
@@ -14,7 +14,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
describe '#evaluate' do
let(:tables) { [table] }
let(:table) { 'users' }
- let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(connection, tables) }
+ let(:context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ described_class,
+ connection,
+ tables,
+ :gitlab_main
+ )
+ end
subject(:evaluate) { described_class.new(context).evaluate }
@@ -25,14 +32,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
it 'returns NoSignal signal in case the feature flag is disabled' do
stub_feature_flags(batched_migrations_health_status_wal: false)
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
expect(evaluate.reason).to include('indicator disabled')
end
it 'returns NoSignal signal when WAL archive queue can not be calculated' do
expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => nil }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
expect(evaluate.reason).to include('WAL archive queue can not be calculated')
end
@@ -45,7 +52,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
context 'when WAL archive queue size is below the limit' do
it 'returns Normal signal' do
expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => 1 }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Normal)
expect(evaluate.reason).to include('WAL archive queue is within limit')
end
end
@@ -53,7 +60,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
context 'when WAL archive queue size is above the limit' do
it 'returns Stop signal' do
expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => 420 }])
- expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ expect(evaluate).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
expect(evaluate.reason).to include('WAL archive queue is too big')
end
end
diff --git a/spec/lib/gitlab/database/health_status/logger_spec.rb b/spec/lib/gitlab/database/health_status/logger_spec.rb
new file mode 100644
index 00000000000..5ae6b40cb3a
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status/logger_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus::Logger, feature_category: :database do
+ subject { described_class.new('/dev/null') }
+
+ it_behaves_like 'a json logger', {}
+
+ it 'excludes context' do
+ expect(described_class.exclude_context?).to be(true)
+ end
+end
diff --git a/spec/lib/gitlab/database/health_status/signals_spec.rb b/spec/lib/gitlab/database/health_status/signals_spec.rb
new file mode 100644
index 00000000000..5bfd8ffb91e
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status/signals_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus::Signals, feature_category: :database do
+ shared_examples 'health status signal' do |subclass, stop_signal, log_signal|
+ let(:indicator) { instance_double('Gitlab::Database::HealthStatus::Indicators::PatroniApdex') }
+ let(:reason) { 'Test reason' }
+
+ subject { subclass.new(indicator, reason: reason) }
+
+ describe '#log_info?' do
+ it 'returns the log signal' do
+ expect(subject.log_info?).to eq(log_signal)
+ end
+ end
+
+ describe '#stop?' do
+ it 'returns the stop signal' do
+ expect(subject.stop?).to eq(stop_signal)
+ end
+ end
+ end
+
+ context 'with Stop signal it should stop and log' do
+ it_behaves_like 'health status signal', described_class::Stop, true, true
+ end
+
+ context 'with Normal signal it should not stop and log' do
+ it_behaves_like 'health status signal', described_class::Normal, false, false
+ end
+
+ context 'with NotAvailable signal it should not stop and log' do
+ it_behaves_like 'health status signal', described_class::NotAvailable, false, false
+ end
+
+ context 'with Unknown signal it should only log and not stop' do
+ it_behaves_like 'health status signal', described_class::Unknown, false, true
+ end
+end
diff --git a/spec/lib/gitlab/database/health_status_spec.rb b/spec/lib/gitlab/database/health_status_spec.rb
new file mode 100644
index 00000000000..bc923635b1d
--- /dev/null
+++ b/spec/lib/gitlab/database/health_status_spec.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::HealthStatus, feature_category: :database do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ describe '.evaluate' do
+ subject(:evaluate) { described_class.evaluate(health_context, [autovacuum_indicator_class]) }
+
+ let(:migration) { build(:batched_background_migration, :active) }
+ let(:health_context) { migration.health_context }
+
+ let(:health_status) { described_class }
+ let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable }
+ let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog }
+ let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex }
+ let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) }
+ let(:wal_indicator) { instance_double(wal_indicator_class) }
+ let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) }
+
+ before do
+ allow(autovacuum_indicator_class).to receive(:new).with(health_context).and_return(autovacuum_indicator)
+ end
+
+ context 'with default indicators' do
+ subject(:evaluate) { described_class.evaluate(health_context) }
+
+ it 'returns a collection of signals' do
+ normal_signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+ not_available_signal = instance_double("#{health_status}::Signals::NotAvailable", log_info?: false)
+
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal)
+ expect(wal_indicator_class).to receive(:new).with(health_context).and_return(wal_indicator)
+ expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal)
+ expect(patroni_apdex_indicator_class).to receive(:new).with(health_context)
+ .and_return(patroni_apdex_indicator)
+ expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal)
+
+ expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal)
+ end
+ end
+
+ it 'returns the signal of the given indicator' do
+ signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
+
+ expect(evaluate).to contain_exactly(signal)
+ end
+
+ context 'with stop signals' do
+ let(:stop_signal) do
+ instance_double(
+ "#{health_status}::Signals::Stop",
+ log_info?: true,
+ indicator_class: autovacuum_indicator_class,
+ short_name: 'Stop',
+ reason: 'Test Exception'
+ )
+ end
+
+ before do
+ allow(autovacuum_indicator).to receive(:evaluate).and_return(stop_signal)
+ end
+
+ context 'with batched migrations as the status checker' do
+ it 'captures BatchedMigration class name in the log' do
+ expect(Gitlab::Database::HealthStatus::Logger).to receive(:info).with(
+ status_checker_id: migration.id,
+ status_checker_type: 'Gitlab::Database::BackgroundMigration::BatchedMigration',
+ job_class_name: migration.job_class_name,
+ health_status_indicator: autovacuum_indicator_class.to_s,
+ indicator_signal: 'Stop',
+ signal_reason: 'Test Exception',
+ message: "#{migration} signaled: #{stop_signal}"
+ )
+
+ evaluate
+ end
+ end
+
+ context 'with sidekiq deferred job as the status checker' do
+ let(:deferred_worker) do
+ Class.new do
+ def self.name
+ 'TestDeferredWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ let(:deferred_worker_health_checker) do
+ Gitlab::SidekiqMiddleware::DeferJobs::DatabaseHealthStatusChecker.new(
+ 123,
+ deferred_worker.name
+ )
+ end
+
+ let(:health_context) do
+ Gitlab::Database::HealthStatus::Context.new(
+ deferred_worker_health_checker,
+ ActiveRecord::Base.connection,
+ :gitlab_main,
+ [:users]
+ )
+ end
+
+ it 'captures sidekiq job class in the log' do
+ expect(Gitlab::Database::HealthStatus::Logger).to receive(:info).with(
+ status_checker_id: deferred_worker_health_checker.id,
+ status_checker_type: 'Gitlab::SidekiqMiddleware::DeferJobs::DatabaseHealthStatusChecker',
+ job_class_name: deferred_worker_health_checker.job_class_name,
+ health_status_indicator: autovacuum_indicator_class.to_s,
+ indicator_signal: 'Stop',
+ signal_reason: 'Test Exception',
+ message: "#{deferred_worker_health_checker} signaled: #{stop_signal}"
+ )
+
+ evaluate
+ end
+ end
+ end
+
+ it 'does not log signals of no interest' do
+ signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
+ expect(described_class).not_to receive(:log_signal)
+
+ evaluate
+ end
+
+ context 'on indicator error' do
+ let(:error) { RuntimeError.new('everything broken') }
+
+ before do
+ allow(autovacuum_indicator).to receive(:evaluate).and_raise(error)
+ end
+
+ it 'does not fail' do
+ expect { evaluate }.not_to raise_error
+ end
+
+ it 'returns Unknown signal' do
+ signal = evaluate.first
+
+ expect(signal).to be_an_instance_of(Gitlab::Database::HealthStatus::Signals::Unknown)
+ expect(signal.reason).to eq("unexpected error: everything broken (RuntimeError)")
+ end
+
+ it 'reports the exception to error tracking' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(
+ error,
+ status_checker_id: migration.id,
+ status_checker_type: 'Gitlab::Database::BackgroundMigration::BatchedMigration',
+ job_class_name: migration.job_class_name
+ )
+
+ evaluate
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb
index b040c7a76bd..caae06ce43a 100644
--- a/spec/lib/gitlab/database/load_balancing/host_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb
@@ -195,6 +195,40 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host).to be_online
end
+
+ it 'clears the cache for latest_lsn_query' do
+ allow(host).to receive(:replica_is_up_to_date?).and_return(true)
+
+ expect(host)
+ .to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .twice
+ .and_return({ 'allowed' => 't' }, { 'allowed' => 'f' })
+
+ # Should receive LATEST_LSN_WITH_LOGICAL_QUERY twice even though we only
+ # return 't' once above
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITH_LOGICAL_QUERY))
+ .twice
+ .and_call_original
+
+ host.replication_lag_size
+ host.replication_lag_size
+
+ # Clear the cache for latest_lsn_query
+ host.refresh_status
+
+      # Should receive LATEST_LSN_WITHOUT_LOGICAL_QUERY since we received 'f'
+ # after clearing the cache
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITHOUT_LOGICAL_QUERY))
+ .once
+ .and_call_original
+
+ host.replication_lag_size
+ end
end
describe '#check_replica_status?' do
@@ -289,6 +323,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host)
.to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_call_original
+
+ expect(host)
+ .to receive(:query_and_release)
.and_return({ 'diff' => diff })
expect(host.data_is_recent_enough?).to eq(false)
@@ -325,6 +364,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
it 'returns nil when the database query returned no rows' do
expect(host)
.to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_call_original
+
+ expect(host)
+ .to receive(:query_and_release)
.and_return({})
expect(host.replication_lag_size).to be_nil
@@ -339,6 +383,54 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
expect(host.replication_lag_size).to be_nil
end
+
+ context 'when can_track_logical_lsn? is false' do
+ before do
+ allow(host).to receive(:can_track_logical_lsn?).and_return(false)
+ end
+
+ it 'uses LATEST_LSN_WITHOUT_LOGICAL_QUERY' do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITHOUT_LOGICAL_QUERY))
+ .and_call_original
+
+ expect(host.replication_lag_size('0/00000000')).to be_an_instance_of(Integer)
+ end
+ end
+
+ context 'when can_track_logical_lsn? is true' do
+ before do
+ allow(host).to receive(:can_track_logical_lsn?).and_return(true)
+ end
+
+ it 'uses LATEST_LSN_WITH_LOGICAL_QUERY' do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITH_LOGICAL_QUERY))
+ .and_call_original
+
+ expect(host.replication_lag_size('0/00000000')).to be_an_instance_of(Integer)
+ end
+ end
+
+ context 'when CAN_TRACK_LOGICAL_LSN_QUERY raises connection errors' do
+ before do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_raise(ActiveRecord::ConnectionNotEstablished)
+ end
+
+ it 'uses LATEST_LSN_WITHOUT_LOGICAL_QUERY' do
+ expect(host)
+ .to receive(:query_and_release)
+ .with(a_string_including(described_class::LATEST_LSN_WITHOUT_LOGICAL_QUERY))
+ .and_call_original
+
+ expect(host.replication_lag_size('0/00000000')).to be_an_instance_of(Integer)
+ end
+ end
end
describe '#primary_write_location' do
@@ -357,28 +449,41 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
it 'returns true when a host has caught up' do
allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => 't' }])
- expect(host.caught_up?('foo')).to eq(true)
- end
+ expect(connection)
+ .to receive(:select_all)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_return([{ 'has_table_privilege' => 't' }])
- it 'returns true when a host has caught up' do
- allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => true }])
+ expect(connection)
+ .to receive(:select_all)
+ .and_return([{ 'diff' => -1 }])
expect(host.caught_up?('foo')).to eq(true)
end
- it 'returns false when a host has not caught up' do
+ it 'returns false when diff query returns nothing' do
allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => 'f' }])
+
+ expect(connection)
+ .to receive(:select_all)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_return([{ 'has_table_privilege' => 't' }])
+
+ expect(connection).to receive(:select_all).and_return([])
expect(host.caught_up?('foo')).to eq(false)
end
it 'returns false when a host has not caught up' do
allow(host).to receive(:connection).and_return(connection)
- expect(connection).to receive(:select_all).and_return([{ 'result' => false }])
+
+ expect(connection)
+ .to receive(:select_all)
+ .with(described_class::CAN_TRACK_LOGICAL_LSN_QUERY)
+ .and_return([{ 'has_table_privilege' => 't' }])
+
+ expect(connection).to receive(:select_all).and_return([{ 'diff' => 123 }])
expect(host.caught_up?('foo')).to eq(false)
end
diff --git a/spec/lib/gitlab/database/lock_writes_manager_spec.rb b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
index 2aa95372338..899f3760132 100644
--- a/spec/lib/gitlab/database/lock_writes_manager_spec.rb
+++ b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
@@ -55,7 +55,9 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :
describe '#lock_writes' do
it 'prevents any writes on the table' do
- subject.lock_writes
+ expect(subject.lock_writes).to eq(
+ { action: "locked", database: "main", dry_run: dry_run, table: test_table }
+ )
expect do
connection.execute("delete from #{test_table}")
@@ -116,19 +118,13 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :
expect(connection).not_to receive(:execute).with(/CREATE TRIGGER/)
expect do
- subject.lock_writes
+ result = subject.lock_writes
+ expect(result).to eq({ action: "skipped", database: "main", dry_run: false, table: test_table })
end.not_to change {
number_of_triggers_on(connection, test_table)
}
end
- it 'returns result hash with action skipped' do
- subject.lock_writes
-
- expect(subject.lock_writes).to eq({ action: "skipped", database: "main", dry_run: false,
-table: test_table })
- end
-
context 'when running in dry_run mode' do
let(:dry_run) { true }
@@ -154,9 +150,10 @@ table: test_table })
end.not_to raise_error
end
- it 'returns result hash with action locked' do
- expect(subject.lock_writes).to eq({ action: "locked", database: "main", dry_run: dry_run,
-table: test_table })
+ it 'returns result hash with action needs_lock' do
+ expect(subject.lock_writes).to eq(
+ { action: "needs_lock", database: "main", dry_run: true, table: test_table }
+ )
end
end
end
@@ -175,13 +172,24 @@ table: test_table })
end
it 'allows writing on the table again' do
- subject.unlock_writes
+ expect(subject.unlock_writes).to eq(
+ { action: "unlocked", database: "main", dry_run: dry_run, table: test_table }
+ )
expect do
connection.execute("delete from #{test_table}")
end.not_to raise_error
end
+ it 'skips unlocking the table if the table was already unlocked for writes' do
+ subject.unlock_writes
+
+ expect(subject).not_to receive(:execute_sql_statement)
+ expect(subject.unlock_writes).to eq(
+ { action: "skipped", database: "main", dry_run: dry_run, table: test_table }
+ )
+ end
+
it 'removes the write protection triggers from the gitlab_main tables on the ci database' do
expect do
subject.unlock_writes
@@ -198,11 +206,6 @@ table: test_table })
subject.unlock_writes
end
- it 'returns result hash with action unlocked' do
- expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run,
-table: test_table })
- end
-
context 'when running in dry_run mode' do
let(:dry_run) { true }
@@ -225,8 +228,9 @@ table: test_table })
end
it 'returns result hash with dry_run true' do
- expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run,
-table: test_table })
+ expect(subject.unlock_writes).to eq(
+ { action: "needs_unlock", database: "main", dry_run: true, table: test_table }
+ )
end
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
index faf0447c054..37075c4d2df 100644
--- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
@@ -78,13 +78,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
}
}
},
- "does add column to ci_builds in gitlab_main and gitlab_ci" => {
+ "does add column to p_ci_builds in gitlab_main and gitlab_ci" => {
migration: ->(klass) do
def change
- add_column :ci_builds, :__test_column, :integer
+ add_column :p_ci_builds, :__test_column, :integer
end
end,
- query_matcher: /ALTER TABLE "ci_builds" ADD "__test_column" integer/,
+ query_matcher: /ALTER TABLE "p_ci_builds" ADD "__test_column" integer/,
expected: {
no_gitlab_schema: {
main: :success,
diff --git a/spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb b/spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb
new file mode 100644
index 00000000000..1cc4ff6891c
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/wraparound_autovacuum_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::WraparoundAutovacuum, feature_category: :database do
+ include Database::DatabaseHelpers
+
+ let(:migration) do
+ Class.new(Gitlab::Database::Migration[2.1])
+ .include(described_class)
+ .new
+ end
+
+ describe '#can_execute_on?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:dot_com, :dev_or_test, :wraparound_prevention, :expectation) do
+ true | true | true | false
+ true | false | true | false
+ false | true | true | false
+ false | false | true | false
+ true | true | false | true
+ true | false | false | true
+ false | true | false | true
+ false | false | false | false
+ end
+
+ with_them do
+ it 'returns true for GitLab.com, dev, or test' do
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(dev_or_test)
+ allow(migration).to receive(:wraparound_prevention_on_tables?).with([:table]).and_return(wraparound_prevention)
+
+ expect(migration.can_execute_on?(:table)).to eq(expectation)
+ end
+ end
+ end
+
+ describe '#wraparound_prevention_on_tables?' do
+ before do
+ swapout_view_for_table(:postgres_autovacuum_activity, connection: ApplicationRecord.connection)
+ create(:postgres_autovacuum_activity, table: 'foo', wraparound_prevention: false)
+ create(:postgres_autovacuum_activity, table: 'bar', wraparound_prevention: true)
+ end
+
+ it { expect(migration.wraparound_prevention_on_tables?([:foo])).to be_falsey }
+ it { expect(migration.wraparound_prevention_on_tables?([:bar])).to be_truthy }
+ it { expect(migration.wraparound_prevention_on_tables?([:foo, :bar])).to be_truthy }
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index f5ce207773f..82f77d2bb19 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -428,21 +428,24 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
describe '#ensure_batched_background_migration_is_finished' do
let(:job_class_name) { 'CopyColumnUsingBackgroundMigrationJob' }
- let(:table) { :events }
+ let(:table_name) { 'events' }
let(:column_name) { :id }
let(:job_arguments) { [["id"], ["id_convert_to_bigint"], nil] }
+ let(:gitlab_schema) { Gitlab::Database::GitlabSchema.table_schema!(table_name) }
let(:configuration) do
{
job_class_name: job_class_name,
- table_name: table,
+ table_name: table_name,
column_name: column_name,
job_arguments: job_arguments
}
end
let(:migration_attributes) do
- configuration.merge(gitlab_schema: Gitlab::Database.gitlab_schemas_for_connection(migration.connection).first)
+ configuration.merge(
+ gitlab_schema: gitlab_schema
+ )
end
before do
@@ -457,7 +460,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
create(:batched_background_migration, :active, migration_attributes)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- allow(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(false)
+ allow(runner).to receive(:finalize).with(job_class_name, table_name, column_name, job_arguments).and_return(false)
end
expect { ensure_batched_background_migration_is_finished }
@@ -530,7 +533,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
migration = create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(migration.finish!)
+ expect(runner).to receive(:finalize).with(job_class_name, table_name, column_name, job_arguments).and_return(migration.finish!)
end
ensure_batched_background_migration_is_finished
@@ -543,7 +546,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).not_to receive(:finalize).with(job_class_name, table, column_name, job_arguments)
+ expect(runner).not_to receive(:finalize).with(job_class_name, table_name, column_name, job_arguments)
end
expect { migration.ensure_batched_background_migration_is_finished(**configuration.merge(finalize: false)) }.to raise_error(RuntimeError)
diff --git a/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
index 07d913cf5cc..476b5f3a784 100644
--- a/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
@@ -679,4 +679,43 @@ RSpec.describe Gitlab::Database::Migrations::ConstraintsHelpers do
end
end
end
+
+ describe '#switch_constraint_names' do
+ before do
+ ActiveRecord::Migration.connection.create_table(:_test_table) do |t|
+ t.references :supplier, foreign_key: { to_table: :_test_table, name: :supplier_fk }
+ t.references :customer, foreign_key: { to_table: :_test_table, name: :customer_fk }
+ end
+ end
+
+ context 'when inside a transaction' do
+ it 'raises an error' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ model.switch_constraint_names(:_test_table, :supplier_fk, :customer_fk)
+ end.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'when outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'executes the statement to swap the constraint names' do
+ expect { model.switch_constraint_names(:_test_table, :supplier_fk, :customer_fk) }
+ .to change { constrained_column_for(:customer_fk) }.from(:customer_id).to(:supplier_id)
+ .and change { constrained_column_for(:supplier_fk) }.from(:supplier_id).to(:customer_id)
+ end
+
+ def constrained_column_for(fk_name)
+ Gitlab::Database::PostgresForeignKey
+ .find_by!(referenced_table_name: :_test_table, name: fk_name)
+ .constrained_columns
+ .first
+ .to_sym
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
index e48937037fa..7899c1588b2 100644
--- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -16,7 +16,9 @@ RSpec.describe 'cross-database foreign keys' do
end
def is_cross_db?(fk_record)
- Gitlab::Database::GitlabSchema.table_schemas!([fk_record.from_table, fk_record.to_table]).many?
+ table_schemas = Gitlab::Database::GitlabSchema.table_schemas!([fk_record.from_table, fk_record.to_table])
+
+ !Gitlab::Database::GitlabSchema.cross_foreign_key_allowed?(table_schemas)
end
it 'onlies have allowed list of cross-database foreign keys', :aggregate_failures do
diff --git a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
index 8e2a53ea76f..b30501cce21 100644
--- a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
@@ -15,8 +15,7 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
table_name: table_name,
partitioning_column: partitioning_column,
parent_table_name: parent_table_name,
- zero_partition_value: partitioning_default,
- lock_tables: lock_tables
+ zero_partition_value: partitioning_default
)
end
@@ -227,16 +226,6 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
end
end
- context 'with locking tables' do
- let(:lock_tables) { [table_name] }
-
- it 'locks the table' do
- recorder = ActiveRecord::QueryRecorder.new { partition }
-
- expect(recorder.log).to include(/LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE/)
- end
- end
-
context 'when an error occurs during the conversion' do
before do
# Set up the fault that we'd like to inject
@@ -264,7 +253,6 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
with_them do
it 'recovers from a fault', :aggregate_failures do
expect { converter.partition }.to raise_error(/fault/)
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(0)
expect { converter.partition }.not_to raise_error
expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
@@ -286,26 +274,6 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
end
-
- context 'with locking tables' do
- let(:lock_tables) { [table_name] }
-
- it 'locks the table before dropping the triggers' do
- recorder = ActiveRecord::QueryRecorder.new { partition }
-
- lock_index = recorder.log.find_index do |log|
- log.start_with?('LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE')
- end
-
- trigger_index = recorder.log.find_index do |log|
- log.start_with?('DROP TRIGGER IF EXISTS _test_table_to_partition_loose_fk_trigger')
- end
-
- expect(lock_index).to be_present
- expect(trigger_index).to be_present
- expect(lock_index).to be < trigger_index
- end
- end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index e6014f81b74..5b6967c2d14 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -2,10 +2,15 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
+RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy, feature_category: :database do
+ include Gitlab::Database::DynamicModelHelpers
+
let(:connection) { ActiveRecord::Base.connection }
- let(:table_name) { :_test_partitioned_test }
- let(:model) { double('model', table_name: table_name, ignored_columns: %w[partition], connection: connection) }
+ let(:table_name) { '_test_partitioned_test' }
+ let(:model) do
+ define_batchable_model(table_name, connection: connection).tap { |m| m.ignored_columns = %w[partition] }
+ end
+
let(:next_partition_if) { double('next_partition_if') }
let(:detach_partition_if) { double('detach_partition_if') }
@@ -87,6 +92,31 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
strategy.validate_and_fix
end
+
+ context 'when the shared connection is for the wrong database' do
+ it 'does not attempt to fix connections' do
+ skip_if_shared_database(:ci)
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+
+ Ci::ApplicationRecord.connection.execute(<<~SQL)
+ create table #{table_name}
+ (
+ id serial not null,
+ partition bigint not null default 1,
+ created_at timestamptz not null,
+ primary key (id, partition)
+ )
+ partition by list(partition);
+
+ create table #{table_name}_1
+ partition of #{table_name} for values in (1);
+ SQL
+
+ Gitlab::Database::SharedModel.using_connection(Ci::ApplicationRecord.connection) do
+ strategy.validate_and_fix
+ end
+ end
+ end
end
describe '#active_partition' do
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index d5f4afd7ba4..5f1e8842f18 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -228,6 +228,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
end
it 'validates FK for each partition' do
+ allow(migration).to receive(:statement_timeout_disabled?).and_return(false)
expect(migration).to receive(:execute).with(/SET statement_timeout TO 0/).twice
expect(migration).to receive(:execute).with(/RESET statement_timeout/).twice
expect(migration).to receive(:execute)
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 571c67db597..6a947044317 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -68,7 +68,6 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
describe '#convert_table_to_first_list_partition' do
it_behaves_like 'delegates to ConvertTable' do
let(:lock_tables) { [source_table] }
- let(:extra_options) { { lock_tables: lock_tables } }
let(:expected_method) { :partition }
let(:migrate) do
migration.convert_table_to_first_list_partition(table_name: source_table,
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index 9df238a0024..8724716dd3d 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -112,6 +112,24 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
end
end
+ context 'without ci database' do
+ it 'only creates partitions for main database' do
+ skip_if_database_exists(:ci)
+
+ allow(Gitlab::Database::Partitioning::PartitionManager).to receive(:new).and_call_original
+
+ # Also, in the case where `ci` database is shared with `main` database,
+ # check that we do not run PartitionManager again for ci connection as
+ # that is redundant.
+ expect(Gitlab::Database::Partitioning::PartitionManager).not_to receive(:new)
+ .with(anything, connection: ci_connection).and_call_original
+
+ expect { described_class.sync_partitions(models) }
+ .to change { find_partitions(table_names.first, conn: main_connection).size }.from(0)
+ .and change { find_partitions(table_names.last, conn: main_connection).size }.from(0)
+ end
+ end
+
context 'when no partitioned models are given' do
it 'manages partitions for each registered model' do
described_class.register_models([models.first])
@@ -247,6 +265,18 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do
.and change { table_exists?(table_names.last) }.from(true).to(false)
end
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(partition_manager_sync_partitions: false)
+ end
+
+ it 'does not call the DetachedPartitionDropper' do
+ expect(Gitlab::Database::Partitioning::DetachedPartitionDropper).not_to receive(:new)
+
+ described_class.drop_detached_partitions
+ end
+ end
+
def table_exists?(table_name)
table_oid(table_name).present?
end
diff --git a/spec/lib/gitlab/database/pg_depend_spec.rb b/spec/lib/gitlab/database/pg_depend_spec.rb
index 547a2c84b76..ff5169ebabf 100644
--- a/spec/lib/gitlab/database/pg_depend_spec.rb
+++ b/spec/lib/gitlab/database/pg_depend_spec.rb
@@ -13,8 +13,14 @@ RSpec.describe Gitlab::Database::PgDepend, type: :model, feature_category: :data
connection.execute('CREATE EXTENSION IF NOT EXISTS pg_stat_statements;')
end
- it 'returns pg_stat_statements', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410508' do
- expect(subject.pluck('relname')).to eq(['pg_stat_statements'])
+ it 'returns pg_stat_statements' do
+ expected_views = ['pg_stat_statements']
+
+ if Gitlab::Database::Reflection.new(described_class).version.to_f >= 14
+ expected_views << 'pg_stat_statements_info' # View added by pg_stat_statements starting in postgres 14
+ end
+
+ expect(subject.pluck('relname')).to match_array(expected_views)
end
end
end
diff --git a/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
index f24c4559349..5367cf1fb9b 100644
--- a/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
+++ b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
@@ -28,5 +28,15 @@ RSpec.describe Gitlab::Database::PostgresAutovacuumActivity, type: :model, featu
it 'returns autovacuum activity for queries tables' do
expect(subject.map(&:table).sort).to eq(tables)
end
+
+ it 'executes the query' do
+ is_expected.to be_a Array
+ end
+ end
+
+ describe '.wraparound_prevention' do
+ subject { described_class.wraparound_prevention }
+
+ it { expect(subject.where_values_hash).to match(a_hash_including('wraparound_prevention' => true)) }
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index 6a0c4226db8..b5e08f58608 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -7,6 +7,9 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
before do
allow(Gitlab::Database::QueryAnalyzer.instance).to receive(:all_analyzers).and_return([analyzer])
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE INDEX index_on_projects ON public.projects USING gin (name gin_trgm_ops)
+ SQL
end
it 'does not increment metrics if feature flag is disabled' do
@@ -59,6 +62,11 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
sql: "SELECT 1 FROM projects LEFT JOIN not_in_schema ON not_in_schema.project_id=projects.id",
expect_error:
/Could not find gitlab schema for table not_in_schema/
+ },
+ "for query altering an INDEX" => {
+ model: ApplicationRecord,
+ sql: "ALTER INDEX index_on_projects SET ( fastupdate = false )",
+ no_op: true
}
}
end
@@ -74,6 +82,10 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
if expect_error
expect { process_sql(model, sql) }.to raise_error(expect_error)
+ elsif no_op
+ expect(described_class.schemas_metrics).not_to receive(:increment)
+
+ process_sql(model, sql)
else
expect(described_class.schemas_metrics).to receive(:increment)
.with(expectations).and_call_original
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
index 02bd6b51463..3ccdb907cba 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
@@ -57,13 +57,19 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
end
- shared_examples 'cross-database modification errors' do |model:|
+ shared_examples 'cross-database modification errors' do |model:, sql_log_contains:|
let(:model) { model }
context "within #{model} transaction" do
it 'raises error' do
model.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
+ expect { run_queries }.to raise_error do |error|
+ expect(error.message).to include 'Cross-database data modification'
+
+ sql_log_contains.each do |sql_query|
+ expect(error.message).to match sql_query
+ end
+ end
end
end
end
@@ -87,7 +93,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
include_examples 'successful examples', model: Ci::Pipeline
- include_examples 'cross-database modification errors', model: Project
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "ci_pipelines"/]
end
context 'when other data is modified' do
@@ -98,7 +105,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
include_examples 'successful examples', model: Project
- include_examples 'cross-database modification errors', model: Ci::Pipeline
+ include_examples 'cross-database modification errors', model: Ci::Pipeline,
+ sql_log_contains: [/UPDATE "projects"/]
end
context 'when both CI and other data is modified' do
@@ -112,11 +120,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
context 'when data modification happens in a transaction' do
- it 'raises error' do
- Project.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
- end
- end
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "projects"/, /UPDATE "ci_pipelines"/]
context 'when ci_pipelines are ignored for cross modification' do
it 'does not raise error' do
@@ -131,11 +136,16 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
context 'when data modification happens in nested transactions' do
- it 'raises error' do
+ it 'raises error, with the generated sql queries included' do
Project.transaction(requires_new: true) do
project.touch
Project.transaction(requires_new: true) do
- expect { pipeline.touch }.to raise_error /Cross-database data modification/
+ expect { pipeline.touch }.to raise_error do |error|
+ expect(error.message).to include('Cross-database data modification')
+
+ expect(error.message).to match(/UPDATE "projects"/)
+ expect(error.message).to match(/UPDATE "ci_pipelines"/)
+ end
end
end
end
@@ -151,11 +161,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
Marginalia::Comment.prepend_comment = prepend_comment_was
end
- it 'raises error' do
- Project.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
- end
- end
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "projects"/, /UPDATE "ci_pipelines"/]
end
end
@@ -170,11 +177,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
context 'when data modification happens in a transaction' do
- it 'raises error' do
- Project.transaction do
- expect { run_queries }.to raise_error /Cross-database data modification/
- end
- end
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/UPDATE "projects"/, /SELECT "ci_pipelines"."id".*FOR UPDATE/]
context 'when the modification is inside a factory save! call' do
let(:runner) { create(:ci_runner, :project, projects: [build(:project)]) }
@@ -194,7 +198,8 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
include_examples 'successful examples', model: Ci::Pipeline
- include_examples 'cross-database modification errors', model: Project
+ include_examples 'cross-database modification errors', model: Project,
+ sql_log_contains: [/INSERT INTO "ci_variables"/]
end
describe '.allow_cross_database_modification_within_transaction' do
diff --git a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
index 261bef58bb6..b90f60e0301 100644
--- a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas, query_analyzers: false do
+RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas,
+ query_analyzers: false, feature_category: :database do
let(:analyzer) { described_class }
context 'properly analyzes queries' do
@@ -15,14 +16,38 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas, query_a
expected_allowed_gitlab_schemas: {
no_schema: :dml_not_allowed,
gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
gitlab_ci: :dml_access_denied # cross-schema access
}
},
- "for INSERT" => {
+ "for SELECT on namespaces" => {
+ sql: "SELECT 1 FROM namespaces",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for INSERT on projects" => {
sql: "INSERT INTO projects VALUES (1)",
expected_allowed_gitlab_schemas: {
no_schema: :dml_not_allowed,
gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
+ gitlab_ci: :dml_access_denied # cross-schema access
+ }
+ },
+ "for INSERT on namespaces" => {
+ sql: "INSERT INTO namespaces VALUES (1)",
+ expected_allowed_gitlab_schemas: {
+ no_schema: :dml_not_allowed,
+ gitlab_main: :success,
+ gitlab_main_clusterwide: :success,
+ gitlab_main_cell: :success,
gitlab_ci: :dml_access_denied # cross-schema access
}
},
diff --git a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
index e82a2ab467d..f1d88615762 100644
--- a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection, feature_category: :
expect(subject).not_to include(excluded.index)
end
- it 'excludes indexes smaller than 1 GB ondisk size' do
+ it 'excludes indexes smaller than 1 GiB ondisk size' do
excluded = create(
:postgres_index_bloat_estimate,
index: create(:postgres_index, ondisk_size_bytes: 0.99.gigabytes),
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection, feature_category: :
expect(subject).not_to include(excluded.index)
end
- it 'includes indexes larger than 100 GB ondisk size' do
+ it 'includes indexes larger than 100 GiB ondisk size' do
included = create(
:postgres_index_bloat_estimate,
index: create(:postgres_index, ondisk_size_bytes: 101.gigabytes),
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 2cb84e2f02a..370d03b495c 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete, feature_category: :subgroups do
+RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete, feature_category: :groups_and_projects do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index 5b5661020b0..b00a1d4a9e1 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :delete,
-feature_category: :subgroups do
+feature_category: :groups_and_projects do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
let(:namespace) { create(:group, name: 'the-path') }
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 787c9e87038..d2665664fb0 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :delete,
-feature_category: :projects do
+feature_category: :groups_and_projects do
let(:migration) { FakeRenameReservedPathMigrationV1.new }
let(:subject) { described_class.new(['the-path'], migration) }
let(:project) do
diff --git a/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb
new file mode 100644
index 00000000000..cfe5572fb51
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_database_adapter_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ForeignKeyDatabaseAdapter, feature_category: :database do
+ subject(:adapter) { described_class.new(query_result) }
+
+ let(:query_result) do
+ {
+ 'schema' => 'public',
+ 'foreign_key_name' => 'fk_2e88fb7ce9',
+ 'table_name' => 'members',
+ 'foreign_key_definition' => 'FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE'
+ }
+ end
+
+ describe '#name' do
+ it { expect(adapter.name).to eq('public.fk_2e88fb7ce9') }
+ end
+
+ describe '#table_name' do
+ it { expect(adapter.table_name).to eq('members') }
+ end
+
+ describe '#statement' do
+ it { expect(adapter.statement).to eq('FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE') }
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb
new file mode 100644
index 00000000000..f7ae0c0f892
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/adapters/foreign_key_structure_sql_adapter_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ForeignKeyStructureSqlAdapter, feature_category: :database do
+ subject(:adapter) { described_class.new(stmt) }
+
+ let(:stmt) { PgQuery.parse(sql).tree.stmts.first.stmt.alter_table_stmt }
+
+ where(:sql, :name, :table_name, :statement) do
+ [
+ [
+ 'ALTER TABLE ONLY public.issues ADD CONSTRAINT fk_05f1e72feb FOREIGN KEY (author_id) REFERENCES users (id) ' \
+ 'ON DELETE SET NULL',
+ 'public.fk_05f1e72feb',
+ 'issues',
+ 'FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL'
+ ],
+ [
+ 'ALTER TABLE public.import_failures ADD CONSTRAINT fk_9a9b9ba21c FOREIGN KEY (user_id) REFERENCES users(id) ' \
+ 'ON DELETE CASCADE',
+ 'public.fk_9a9b9ba21c',
+ 'import_failures',
+ 'FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE'
+ ]
+ ]
+ end
+
+ with_them do
+ describe '#name' do
+ it { expect(adapter.name).to eq(name) }
+ end
+
+ describe '#table_name' do
+ it { expect(adapter.table_name).to eq(table_name) }
+ end
+
+ describe '#statement' do
+ it { expect(adapter.statement).to eq(statement) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
index 7d6a279def9..fbaf8474f22 100644
--- a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
+++ b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb
@@ -13,5 +13,29 @@ RSpec.describe Gitlab::Database::SchemaValidation::SchemaInconsistency, type: :m
it { is_expected.to validate_presence_of(:object_name) }
it { is_expected.to validate_presence_of(:valitador_name) }
it { is_expected.to validate_presence_of(:table_name) }
+ it { is_expected.to validate_presence_of(:diff) }
+ end
+
+ describe 'scopes' do
+ describe '.with_open_issues' do
+ subject(:inconsistencies) { described_class.with_open_issues }
+
+ let(:closed_issue) { create(:issue, :closed) }
+ let(:open_issue) { create(:issue, :opened) }
+
+ let!(:schema_inconsistency_with_issue_closed) do
+ create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
+ valitador_name: 'different_definition_indexes', issue: closed_issue)
+ end
+
+ let!(:schema_inconsistency_with_issue_opened) do
+ create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
+ valitador_name: 'different_definition_indexes', issue: open_issue)
+ end
+
+ it 'returns only schema inconsistencies with GitLab issues open' do
+ expect(inconsistencies).to eq([schema_inconsistency_with_issue_opened])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb
new file mode 100644
index 00000000000..7500ad44f82
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/schema_objects/foreign_key_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::ForeignKey, feature_category: :database do
+ subject(:foreign_key) { described_class.new(adapter) }
+
+ let(:database_adapter) { 'Gitlab::Database::SchemaValidation::Adapters::ForeignKeyDatabaseAdapter' }
+ let(:adapter) do
+ instance_double(database_adapter, name: 'public.fk_1d37cddf91', table_name: 'vulnerabilities',
+ statement: 'FOREIGN KEY (epic_id) REFERENCES epics(id) ON DELETE SET NULL')
+ end
+
+ describe '#name' do
+ it { expect(foreign_key.name).to eq('public.fk_1d37cddf91') }
+ end
+
+ describe '#table_name' do
+ it { expect(foreign_key.table_name).to eq('vulnerabilities') }
+ end
+
+ describe '#statement' do
+ it { expect(foreign_key.statement).to eq('FOREIGN KEY (epic_id) REFERENCES epics(id) ON DELETE SET NULL') }
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb
index 84db721fc2d..0b104e40c11 100644
--- a/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb
+++ b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb
@@ -24,10 +24,6 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
subject(:execute) { described_class.new(inconsistency, project, user).execute }
- before do
- stub_spam_services
- end
-
context 'when is not GitLab.com' do
it 'does not create a schema inconsistency record' do
allow(Gitlab).to receive(:com?).and_return(false)
@@ -39,7 +35,12 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
context 'when the issue creation fails' do
let(:issue_creation) { instance_double(Mutations::Issues::Create, resolve: { errors: 'error' }) }
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
allow(Mutations::Issues::Create).to receive(:new).and_return(issue_creation)
end
@@ -51,7 +52,12 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
end
context 'when a new inconsistency is found' do
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
project.add_developer(user)
end
@@ -63,19 +69,116 @@ RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_c
end
context 'when the schema inconsistency already exists' do
- before do
- project.add_developer(user)
+ let(:diff) do
+ "-#{structure_sql_statement}\n" \
+ "+#{database_statement}\n"
end
let!(:schema_inconsistency) do
create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
- valitador_name: 'different_definition_indexes')
+ valitador_name: 'different_definition_indexes', diff: diff)
end
- it 'does not create a schema inconsistency record' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the issue has the last schema inconsistency' do
+ it 'does not add a note' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ expect { execute }.not_to change { schema_inconsistency.issue.notes.count }
+ end
+ end
+
+ context 'when the issue is outdated' do
+ let!(:schema_inconsistency) do
+ create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements',
+ valitador_name: 'different_definition_indexes', diff: 'old_diff')
+ end
+
+ it 'adds a note' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ expect { execute }.to change { schema_inconsistency.issue.notes.count }.from(0).to(1)
+ end
+
+ it 'updates the diff' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ execute
+
+ expect(schema_inconsistency.reload.diff).to eq(diff)
+ end
+ end
+
+ context 'when the GitLab issue is open' do
+ it 'does not create a new schema inconsistency record' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ schema_inconsistency.issue.update!(state_id: Issue.available_states[:opened])
+
+ expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count }
+ end
+ end
+
+ context 'when the GitLab issue is not open' do
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
+ before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
+ project.add_developer(user)
+ end
+
+ it 'creates a new schema inconsistency record' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ schema_inconsistency.issue.update!(state_id: Issue.available_states[:closed])
+
+ expect { execute }.to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count }
+ end
+ end
+ end
+
+ context 'when the dictionary file is not present' do
+ before do
+ allow(Gitlab::Database::GitlabSchema).to receive(:dictionary_paths).and_return(['dictionary_not_found_path/'])
+
+ project.add_developer(user)
+ end
+
+ it 'adds the default labels' do
allow(Gitlab).to receive(:com?).and_return(true)
- expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count }
+ inconsistency = execute
+
+ labels = inconsistency.issue.labels.map(&:name)
+
+ expect(labels).to eq %w[database database-inconsistency-report type::maintenance severity::4]
+ end
+ end
+
+ context 'when dictionary feature_categories are available' do
+ let(:convert_object) do
+ instance_double('Gitlab::Database::ConvertFeatureCategoryToGroupLabel', execute: 'group_label')
+ end
+
+ before do
+ allow(Gitlab::Database::ConvertFeatureCategoryToGroupLabel).to receive(:new).and_return(convert_object)
+
+ allow(Gitlab::Database::GitlabSchema).to receive(:dictionary_paths).and_return(['spec/fixtures/'])
+
+ project.add_developer(user)
+ end
+
+ it 'adds the default labels + group labels' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ inconsistency = execute
+
+ labels = inconsistency.issue.labels.map(&:name)
+
+ expect(labels).to eq %w[database database-inconsistency-report type::maintenance severity::4 group_label]
end
end
end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb
index 036ad6424f0..e8c08277d52 100644
--- a/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb
+++ b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb
@@ -12,13 +12,16 @@ RSpec.describe Gitlab::Database::SchemaValidation::Validators::BaseValidator, fe
Gitlab::Database::SchemaValidation::Validators::ExtraTableColumns,
Gitlab::Database::SchemaValidation::Validators::ExtraIndexes,
Gitlab::Database::SchemaValidation::Validators::ExtraTriggers,
+ Gitlab::Database::SchemaValidation::Validators::ExtraForeignKeys,
Gitlab::Database::SchemaValidation::Validators::MissingTables,
Gitlab::Database::SchemaValidation::Validators::MissingTableColumns,
Gitlab::Database::SchemaValidation::Validators::MissingIndexes,
Gitlab::Database::SchemaValidation::Validators::MissingTriggers,
+ Gitlab::Database::SchemaValidation::Validators::MissingForeignKeys,
Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTables,
Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes,
- Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTriggers
+ Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTriggers,
+ Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionForeignKeys
])
end
end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb
new file mode 100644
index 00000000000..ffebffc3ad2
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/validators/different_definition_foreign_keys_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionForeignKeys,
+ feature_category: :database do
+ include_examples 'foreign key validators', described_class, ['public.wrong_definition_fk']
+end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb
new file mode 100644
index 00000000000..053153aa214
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/validators/extra_foreign_keys_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Validators::ExtraForeignKeys, feature_category: :database do
+ include_examples 'foreign key validators', described_class, ['public.extra_fk']
+end
diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb
new file mode 100644
index 00000000000..a47804abb91
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_validation/validators/missing_foreign_keys_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingForeignKeys, feature_category: :database do
+ include_examples 'foreign key validators', described_class, %w[public.fk_rails_536b96bff1 public.missing_fk]
+end
diff --git a/spec/lib/gitlab/database/tables_locker_spec.rb b/spec/lib/gitlab/database/tables_locker_spec.rb
index aaafe27f7ca..0e7e929d54b 100644
--- a/spec/lib/gitlab/database/tables_locker_spec.rb
+++ b/spec/lib/gitlab/database/tables_locker_spec.rb
@@ -251,6 +251,31 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate
it_behaves_like 'unlock partitions', gitlab_main_detached_partition, 'ci'
end
+ context 'when not including partitions' do
+ subject { described_class.new(include_partitions: false).lock_writes }
+
+ it 'does not include any table partitions' do
+ gitlab_main_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition"
+
+ expect(Gitlab::Database::LockWritesManager).not_to receive(:new).with(
+ hash_including(table_name: gitlab_main_partition)
+ )
+
+ subject
+ end
+
+ it 'does not include any detached partitions' do
+ detached_partition_name = "_test_gitlab_main_part_20220101"
+ gitlab_main_detached_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{detached_partition_name}"
+
+ expect(Gitlab::Database::LockWritesManager).not_to receive(:new).with(
+ hash_including(table_name: gitlab_main_detached_partition)
+ )
+
+ subject
+ end
+ end
+
context 'when running in dry_run mode' do
subject { described_class.new(dry_run: true).lock_writes }
diff --git a/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
deleted file mode 100644
index 1150de880b5..00000000000
--- a/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
+++ /dev/null
@@ -1,122 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::DatabaseImporters::CommonMetrics::Importer do
- subject { described_class.new }
-
- context "does import common_metrics.yml" do
- let(:groups) { subject.content['panel_groups'] }
- let(:panels) { groups.flat_map { |group| group['panels'] } }
- let(:metrics) { panels.flat_map { |group| group['metrics'] } }
- let(:metric_ids) { metrics.map { |metric| metric['id'] } }
-
- before do
- subject.execute
- end
-
- it "has the same amount of groups" do
- expect(PrometheusMetric.common.group(:group).count.count).to eq(groups.count)
- end
-
- it "has the same amount of panels" do
- expect(PrometheusMetric.common.group(:group, :title).count.count).to eq(panels.count)
- end
-
- it "has the same amount of metrics" do
- expect(PrometheusMetric.common.count).to eq(metrics.count)
- end
-
- it "does not have duplicate IDs" do
- expect(metric_ids).to eq(metric_ids.uniq)
- end
-
- it "imports all IDs" do
- expect(PrometheusMetric.common.pluck(:identifier)).to contain_exactly(*metric_ids)
- end
- end
-
- context "does import common_metrics.yml" do
- it "when executed from outside of the Rails.root" do
- Dir.chdir(Dir.tmpdir) do
- expect { subject.execute }.not_to raise_error
- end
-
- expect(PrometheusMetric.common).not_to be_empty
- end
- end
-
- context 'does import properly all fields' do
- let(:query_identifier) { 'response-metric' }
- let(:dashboard) do
- {
- panel_groups: [{
- group: 'Response metrics (NGINX Ingress)',
- panels: [{
- title: "Throughput",
- y_label: "Requests / Sec",
- metrics: [{
- id: query_identifier,
- query_range: 'my-query',
- unit: 'my-unit',
- label: 'status code'
- }]
- }]
- }]
- }
- end
-
- before do
- expect(subject).to receive(:content) { dashboard.deep_stringify_keys }
- end
-
- shared_examples 'stores metric' do
- let(:metric) { PrometheusMetric.find_by(identifier: query_identifier) }
-
- it 'with all data' do
- expect(metric.group).to eq('nginx_ingress')
- expect(metric.title).to eq('Throughput')
- expect(metric.y_label).to eq('Requests / Sec')
- expect(metric.unit).to eq('my-unit')
- expect(metric.legend).to eq('status code')
- expect(metric.query).to eq('my-query')
- end
- end
-
- context 'if ID is missing' do
- let(:query_identifier) {}
-
- it 'raises exception' do
- expect { subject.execute }.to raise_error(Gitlab::DatabaseImporters::CommonMetrics::Importer::MissingQueryId)
- end
- end
-
- context 'for existing common metric with different ID' do
- let!(:existing_metric) { create(:prometheus_metric, :common, identifier: 'my-existing-metric') }
-
- before do
- subject.execute
- end
-
- it_behaves_like 'stores metric' do
- it 'and existing metric is not changed' do
- expect(metric).not_to eq(existing_metric)
- end
- end
- end
-
- context 'when metric with ID exists ' do
- let!(:existing_metric) { create(:prometheus_metric, :common, identifier: 'response-metric') }
-
- before do
- subject.execute
- end
-
- it_behaves_like 'stores metric' do
- it 'and existing metric is changed' do
- expect(metric).to eq(existing_metric)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
deleted file mode 100644
index 98a8e144d16..00000000000
--- a/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetric do
- it 'group enum equals ::PrometheusMetric' do
- expect(described_class.groups).to eq(::PrometheusMetric.groups)
- end
-
- it '.group_titles equals ::PrometheusMetric' do
- existing_group_titles = Enums::PrometheusMetric.group_details.transform_values do |value|
- value[:group_title]
- end
- expect(Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetricEnums.group_titles).to eq(existing_group_titles)
- end
-end
diff --git a/spec/lib/gitlab/database_importers/default_organization_importer_spec.rb b/spec/lib/gitlab/database_importers/default_organization_importer_spec.rb
new file mode 100644
index 00000000000..41a8aaca699
--- /dev/null
+++ b/spec/lib/gitlab/database_importers/default_organization_importer_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DatabaseImporters::DefaultOrganizationImporter, feature_category: :cell do
+ describe '#create_default_organization' do
+ let(:default_id) { Organizations::Organization::DEFAULT_ORGANIZATION_ID }
+
+ subject { described_class.create_default_organization }
+
+ context 'when default organization does not exist' do
+ it 'creates a default organization' do
+ expect(Organizations::Organization.find_by(id: default_id)).to be_nil
+
+ subject
+
+ default_org = Organizations::Organization.find(default_id)
+
+ expect(default_org.name).to eq('Default')
+ expect(default_org.path).to eq('default')
+ end
+ end
+
+ context 'when default organization exists' do
+ let!(:default_org) { create(:organization, :default) }
+
+ it 'does not create another organization' do
+ expect { subject }.not_to change { Organizations::Organization.count }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index f2be888e6eb..ab3cd8fa5e6 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -15,6 +15,68 @@ RSpec.describe Gitlab::Database, feature_category: :database do
end
end
+ describe '.all_database_connections' do
+ it 'the first entry is always main' do
+ expect(described_class.all_database_connections.keys).to start_with('main')
+ end
+
+ it 'contains as many entries as YAML files' do
+ expect(described_class.all_database_connections.values.map(&:file_path))
+ .to contain_exactly(*described_class.all_database_connection_files)
+ end
+ end
+
+ describe '.database_base_models' do
+ subject { described_class.database_base_models }
+
+ it 'contains "main"' do
+ is_expected.to include("main" => ActiveRecord::Base)
+ end
+
+ it 'does not contain "ci" when not running CI database' do
+ skip_if_multiple_databases_are_setup(:ci)
+
+ is_expected.not_to include("ci")
+ end
+
+ it 'contains "ci" pointing to Ci::ApplicationRecord when running CI database' do
+ skip_if_multiple_databases_not_setup(:ci)
+
+ is_expected.to include("ci" => Ci::ApplicationRecord)
+ end
+ end
+
+ describe '.all_gitlab_schemas' do
+ it 'contains as many entries as YAML files' do
+ expect(described_class.all_gitlab_schemas.values.map(&:file_path))
+ .to contain_exactly(*described_class.all_gitlab_schema_files)
+ end
+ end
+
+ describe '.schemas_to_base_models' do
+ subject { described_class.schemas_to_base_models }
+
+ it 'contains gitlab_main' do
+ is_expected.to include(gitlab_main: [ActiveRecord::Base])
+ end
+
+ it 'contains gitlab_shared' do
+ is_expected.to include(gitlab_main: include(ActiveRecord::Base))
+ end
+
+ it 'contains gitlab_ci pointing to ActiveRecord::Base when not running CI database' do
+ skip_if_multiple_databases_are_setup(:ci)
+
+ is_expected.to include(gitlab_ci: [ActiveRecord::Base])
+ end
+
+ it 'contains gitlab_ci pointing to Ci::ApplicationRecord when running CI database' do
+ skip_if_multiple_databases_not_setup(:ci)
+
+ is_expected.to include(gitlab_ci: [Ci::ApplicationRecord])
+ end
+ end
+
describe '.default_pool_size' do
before do
allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
@@ -250,22 +312,35 @@ RSpec.describe Gitlab::Database, feature_category: :database do
end
describe '.db_config_names' do
- let(:expected) { %w[foo bar] }
+ using RSpec::Parameterized::TableSyntax
- it 'includes only main by default' do
- allow(::ActiveRecord::Base).to receive(:configurations).and_return(
- double(configs_for: %w[foo bar].map { |x| double(name: x) })
- )
-
- expect(described_class.db_config_names).to eq(expected)
+ where(:configs_for, :gitlab_schema, :expected_main, :expected_main_ci) do
+ %i[main] | :gitlab_shared | %i[main] | %i[main]
+ %i[main ci] | :gitlab_shared | %i[main] | %i[main ci]
+ %i[main ci] | :gitlab_ci | %i[main] | %i[ci]
end
- it 'excludes geo when that is included' do
- allow(::ActiveRecord::Base).to receive(:configurations).and_return(
- double(configs_for: %w[foo bar geo].map { |x| double(name: x) })
- )
+ with_them do
+ before do
+ hash_configs = configs_for.map do |x|
+ instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, name: x)
+ end
+ allow(::ActiveRecord::Base).to receive(:configurations).and_return(
+ instance_double(ActiveRecord::DatabaseConfigurations, configs_for: hash_configs)
+ )
+ end
- expect(described_class.db_config_names).to eq(expected)
+ if ::Gitlab::Database.has_config?(:ci)
+ it 'when main and CI database are configured' do
+ expect(described_class.db_config_names(with_schema: gitlab_schema))
+ .to eq(expected_main_ci)
+ end
+ else
+ it 'when only main database is configured' do
+ expect(described_class.db_config_names(with_schema: gitlab_schema))
+ .to eq(expected_main)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb b/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
index e59756cb7bc..86ebddc9681 100644
--- a/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/requirements_txt_linker_spec.rb
@@ -68,22 +68,22 @@ RSpec.describe Gitlab::DependencyLinker::RequirementsTxtLinker do
end
it 'links dependencies' do
- expect(subject).to include(link('nose', 'https://pypi.python.org/pypi/nose'))
- expect(subject).to include(link('nose-cov', 'https://pypi.python.org/pypi/nose-cov'))
- expect(subject).to include(link('beautifulsoup4', 'https://pypi.python.org/pypi/beautifulsoup4'))
- expect(subject).to include(link('docopt', 'https://pypi.python.org/pypi/docopt'))
- expect(subject).to include(link('keyring', 'https://pypi.python.org/pypi/keyring'))
- expect(subject).to include(link('coverage', 'https://pypi.python.org/pypi/coverage'))
- expect(subject).to include(link('Mopidy-Dirble', 'https://pypi.python.org/pypi/Mopidy-Dirble'))
- expect(subject).to include(link('rejected', 'https://pypi.python.org/pypi/rejected'))
- expect(subject).to include(link('green', 'https://pypi.python.org/pypi/green'))
- expect(subject).to include(link('Jinja2', 'https://pypi.python.org/pypi/Jinja2'))
- expect(subject).to include(link('Pygments', 'https://pypi.python.org/pypi/Pygments'))
- expect(subject).to include(link('Sphinx', 'https://pypi.python.org/pypi/Sphinx'))
- expect(subject).to include(link('docutils', 'https://pypi.python.org/pypi/docutils'))
- expect(subject).to include(link('markupsafe', 'https://pypi.python.org/pypi/markupsafe'))
- expect(subject).to include(link('pytest', 'https://pypi.python.org/pypi/pytest'))
- expect(subject).to include(link('foop', 'https://pypi.python.org/pypi/foop'))
+ expect(subject).to include(link('nose', 'https://pypi.org/project/nose/'))
+ expect(subject).to include(link('nose-cov', 'https://pypi.org/project/nose-cov/'))
+ expect(subject).to include(link('beautifulsoup4', 'https://pypi.org/project/beautifulsoup4/'))
+ expect(subject).to include(link('docopt', 'https://pypi.org/project/docopt/'))
+ expect(subject).to include(link('keyring', 'https://pypi.org/project/keyring/'))
+ expect(subject).to include(link('coverage', 'https://pypi.org/project/coverage/'))
+ expect(subject).to include(link('Mopidy-Dirble', 'https://pypi.org/project/Mopidy-Dirble/'))
+ expect(subject).to include(link('rejected', 'https://pypi.org/project/rejected/'))
+ expect(subject).to include(link('green', 'https://pypi.org/project/green/'))
+ expect(subject).to include(link('Jinja2', 'https://pypi.org/project/Jinja2/'))
+ expect(subject).to include(link('Pygments', 'https://pypi.org/project/Pygments/'))
+ expect(subject).to include(link('Sphinx', 'https://pypi.org/project/Sphinx/'))
+ expect(subject).to include(link('docutils', 'https://pypi.org/project/docutils/'))
+ expect(subject).to include(link('markupsafe', 'https://pypi.org/project/markupsafe/'))
+ expect(subject).to include(link('pytest', 'https://pypi.org/project/pytest/'))
+ expect(subject).to include(link('foop', 'https://pypi.org/project/foop/'))
end
it 'links URLs' do
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::DependencyLinker::RequirementsTxtLinker do
end
it 'does not contain link with a newline as package name' do
- expect(subject).not_to include(link("\n", "https://pypi.python.org/pypi/\n"))
+ expect(subject).not_to include(link("\n", "https://pypi.org/project/\n"))
end
end
end
diff --git a/spec/lib/gitlab/diff/formatters/file_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/file_formatter_spec.rb
new file mode 100644
index 00000000000..32e5f17f7eb
--- /dev/null
+++ b/spec/lib/gitlab/diff/formatters/file_formatter_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::Formatters::FileFormatter, feature_category: :code_review_workflow do
+ let(:base_attrs) do
+ {
+ base_sha: 123,
+ start_sha: 456,
+ head_sha: 789,
+ old_path: nil,
+ new_path: nil,
+ position_type: 'file'
+ }
+ end
+
+ let(:attrs) { base_attrs.merge(old_path: 'path.rb', new_path: 'path.rb') }
+
+ it_behaves_like 'position formatter' do
+ # rubocop:disable Fips/SHA1 (This is used to match the existing class method)
+ let(:key) do
+ [123, 456, 789,
+ Digest::SHA1.hexdigest(formatter.old_path), Digest::SHA1.hexdigest(formatter.new_path),
+ 'path.rb', 'path.rb']
+ end
+ # rubocop:enable Fips/SHA1
+ end
+
+ describe '#==' do
+ subject { described_class.new(attrs) }
+
+ it { is_expected.to eq(subject) }
+
+ [:old_path, :new_path].each do |attr|
+ context "with attribute:#{attr}" do
+ let(:other_formatter) do
+ described_class.new(attrs.merge(attr => 9))
+ end
+
+ it { is_expected.not_to eq(other_formatter) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
index 290585d0991..5270c1777bc 100644
--- a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
@@ -10,7 +10,8 @@ RSpec.describe Gitlab::Diff::Formatters::TextFormatter do
head_sha: 789,
old_path: 'old_path.txt',
new_path: 'new_path.txt',
- line_range: nil
+ line_range: nil,
+ ignore_whitespace_change: false
}
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 43e4f28b4df..c51eaa4fa18 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -217,7 +217,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache, feature_
describe '#clear' do
it 'clears cache' do
- expect_any_instance_of(Redis).to receive(:del).with(cache_key)
+ Gitlab::Redis::Cache.with { |r| expect(r).to receive(:del).with(cache_key) }
cache.clear
end
@@ -241,7 +241,8 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache, feature_
end
it "uses ActiveSupport::Gzip to compress data when writing to cache" do
- expect(ActiveSupport::Gzip).to receive(:compress).and_call_original
+ # at least once as Gitlab::Redis::Cache is a multistore
+ expect(ActiveSupport::Gzip).to receive(:compress).at_least(1).and_call_original
cache.send(:write_to_redis_hash, diff_hash)
end
diff --git a/spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb b/spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb
new file mode 100644
index 00000000000..0d03f7ce6ca
--- /dev/null
+++ b/spec/lib/gitlab/diff/position_tracer/file_strategy_spec.rb
@@ -0,0 +1,238 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::PositionTracer::FileStrategy, feature_category: :code_review_workflow do
+ include PositionTracerHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let(:current_user) { project.first_owner }
+ let(:file_name) { 'test-file' }
+ let(:new_file_name) { "#{file_name}-new" }
+ let(:second_file_name) { "#{file_name}-2" }
+ let(:branch_name) { 'position-tracer-test' }
+ let(:old_position) { position(old_path: file_name, new_path: file_name, position_type: 'file') }
+
+ let(:tracer) do
+ Gitlab::Diff::PositionTracer.new(
+ project: project,
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs
+ )
+ end
+
+ let(:strategy) { described_class.new(tracer) }
+
+ let(:initial_commit) do
+ project.commit(create_branch(branch_name, 'master')[:branch]&.name || 'master')
+ end
+
+ subject { strategy.trace(old_position) }
+
+ describe '#trace' do
+ describe 'diff scenarios' do
+ let(:create_file_commit) do
+ initial_commit
+
+ create_file(
+ branch_name,
+ file_name,
+ Base64.encode64('content')
+ )
+ end
+
+ let(:update_file_commit) do
+ create_file_commit
+
+ update_file(
+ branch_name,
+ file_name,
+ Base64.encode64('updatedcontent')
+ )
+ end
+
+ let(:update_file_again_commit) do
+ update_file_commit
+
+ update_file(
+ branch_name,
+ file_name,
+ Base64.encode64('updatedcontentagain')
+ )
+ end
+
+ let(:delete_file_commit) do
+ create_file_commit
+ delete_file(branch_name, file_name)
+ end
+
+ let(:rename_file_commit) do
+ delete_file_commit
+
+ create_file(
+ branch_name,
+ new_file_name,
+ Base64.encode64('renamedcontent')
+ )
+ end
+
+ let(:create_second_file_commit) do
+ create_file_commit
+
+ create_file(
+ branch_name,
+ second_file_name,
+ Base64.encode64('morecontent')
+ )
+ end
+
+ let(:create_another_file_commit) do
+ create_file(
+ branch_name,
+ second_file_name,
+ Base64.encode64('morecontent')
+ )
+ end
+
+ let(:update_another_file_commit) do
+ update_file(
+ branch_name,
+ second_file_name,
+ Base64.encode64('updatedmorecontent')
+ )
+ end
+
+ context 'when the file was created in the old diff' do
+ context 'when the file is unchanged between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, create_second_file_commit) }
+
+ it 'returns the new position' do
+ expect_new_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was updated between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, update_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was renamed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, rename_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, rename_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was removed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(initial_commit, delete_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, delete_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file is unchanged in the new diff' do
+ let(:old_diff_refs) { diff_refs(initial_commit, create_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_another_file_commit, update_another_file_commit) }
+ let(:change_diff_refs) { diff_refs(initial_commit, create_another_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+ end
+
+ context 'when the file was changed in the old diff' do
+ context 'when the file is unchanged in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, create_second_file_commit) }
+
+ it 'returns the new position' do
+ expect_new_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was updated in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, update_file_again_commit) }
+ let(:change_diff_refs) { diff_refs(update_file_commit, update_file_again_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was renamed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, rename_file_commit) }
+ let(:change_diff_refs) { diff_refs(update_file_commit, rename_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file was removed in between the old and the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_file_commit, delete_file_commit) }
+ let(:change_diff_refs) { diff_refs(update_file_commit, delete_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+
+ context 'when the file is unchanged in the new diff' do
+ let(:old_diff_refs) { diff_refs(create_file_commit, update_file_commit) }
+ let(:new_diff_refs) { diff_refs(create_another_file_commit, update_another_file_commit) }
+ let(:change_diff_refs) { diff_refs(create_file_commit, create_another_file_commit) }
+
+ it 'returns the position of the change' do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb
index 9b0ea892f91..4aa4f160fc9 100644
--- a/spec/lib/gitlab/diff/position_tracer_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer_spec.rb
@@ -18,8 +18,13 @@ RSpec.describe Gitlab::Diff::PositionTracer do
let(:project) { double }
let(:old_diff_refs) { diff_refs }
let(:new_diff_refs) { diff_refs }
- let(:position) { double(on_text?: on_text?, diff_refs: diff_refs) }
+ let(:on_file?) { false }
+ let(:on_text?) { false }
let(:tracer) { double }
+ let(:position) do
+ double(on_text?: on_text?, on_image?: false, on_file?: on_file?, diff_refs: diff_refs,
+ ignore_whitespace_change: false)
+ end
context 'position is on text' do
let(:on_text?) { true }
@@ -48,6 +53,20 @@ RSpec.describe Gitlab::Diff::PositionTracer do
subject.trace(position)
end
end
+
+ context 'position on file' do
+ let(:on_file?) { true }
+
+ it 'calls ImageStrategy#trace' do
+ expect(Gitlab::Diff::PositionTracer::FileStrategy)
+ .to receive(:new)
+ .with(subject)
+ .and_return(tracer)
+ expect(tracer).to receive(:trace).with(position)
+
+ subject.trace(position)
+ end
+ end
end
describe 'diffs methods' do
diff --git a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
index 0dc0f50b104..30981e4bd7d 100644
--- a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
@@ -41,81 +41,57 @@ RSpec.describe Gitlab::DiscussionsDiff::HighlightCache, :clean_gitlab_redis_cach
end
describe '#read_multiple' do
- shared_examples 'read multiple keys' do
- it 'reads multiple keys and serializes content into Gitlab::Diff::Line objects' do
- described_class.write_multiple(mapping)
-
- found = described_class.read_multiple(mapping.keys)
-
- expect(found.size).to eq(2)
- expect(found.first.size).to eq(2)
- expect(found.first).to all(be_a(Gitlab::Diff::Line))
- end
-
- it 'returns nil when cached key is not found' do
- described_class.write_multiple(mapping)
+ it 'reads multiple keys and serializes content into Gitlab::Diff::Line objects' do
+ described_class.write_multiple(mapping)
- found = described_class.read_multiple([2, 3])
+ found = described_class.read_multiple(mapping.keys)
- expect(found.size).to eq(2)
+ expect(found.size).to eq(2)
+ expect(found.first.size).to eq(2)
+ expect(found.first).to all(be_a(Gitlab::Diff::Line))
+ end
- expect(found.first).to eq(nil)
- expect(found.second.size).to eq(2)
- expect(found.second).to all(be_a(Gitlab::Diff::Line))
- end
+ it 'returns nil when cached key is not found' do
+ described_class.write_multiple(mapping)
- it 'returns lines which rich_text are HTML-safe' do
- described_class.write_multiple(mapping)
+ found = described_class.read_multiple([2, 3])
- found = described_class.read_multiple(mapping.keys)
- rich_texts = found.flatten.map(&:rich_text)
+ expect(found.size).to eq(2)
- expect(rich_texts).to all(be_html_safe)
- end
+ expect(found.first).to eq(nil)
+ expect(found.second.size).to eq(2)
+ expect(found.second).to all(be_a(Gitlab::Diff::Line))
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ it 'returns lines which rich_text are HTML-safe' do
+ described_class.write_multiple(mapping)
- it_behaves_like 'read multiple keys'
- end
+ found = described_class.read_multiple(mapping.keys)
+ rich_texts = found.flatten.map(&:rich_text)
- it_behaves_like 'read multiple keys'
+ expect(rich_texts).to all(be_html_safe)
+ end
end
describe '#clear_multiple' do
- shared_examples 'delete multiple keys' do
- it 'removes all named keys' do
- described_class.write_multiple(mapping)
-
- described_class.clear_multiple(mapping.keys)
-
- expect(described_class.read_multiple(mapping.keys)).to all(be_nil)
- end
+ it 'removes all named keys' do
+ described_class.write_multiple(mapping)
- it 'only removed named keys' do
- to_clear, to_leave = mapping.keys
+ described_class.clear_multiple(mapping.keys)
- described_class.write_multiple(mapping)
- described_class.clear_multiple([to_clear])
+ expect(described_class.read_multiple(mapping.keys)).to all(be_nil)
+ end
- cleared, left = described_class.read_multiple([to_clear, to_leave])
+ it 'only removed named keys' do
+ to_clear, to_leave = mapping.keys
- expect(cleared).to be_nil
- expect(left).to all(be_a(Gitlab::Diff::Line))
- end
- end
+ described_class.write_multiple(mapping)
+ described_class.clear_multiple([to_clear])
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ cleared, left = described_class.read_multiple([to_clear, to_leave])
- it_behaves_like 'delete multiple keys'
+ expect(cleared).to be_nil
+ expect(left).to all(be_a(Gitlab::Diff::Line))
end
-
- it_behaves_like 'delete multiple keys'
end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 7bba0775668..ef2acc9ec92 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
+RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :service_desk do
include ServiceDeskHelper
include_context 'email shared context'
@@ -67,6 +67,22 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
end
+ context 'when encoding of an email is iso-8859-2' do
+ let(:email_raw) { email_fixture('emails/service_desk_encoding.eml') }
+ let(:expected_description) do
+ "Body of encoding iso-8859-2 test: ťžščľžťťč"
+ end
+
+ it 'creates a new issue with readable subject and body' do
+ expect { receiver.execute }.to change { Issue.count }.by(1)
+
+ new_issue = Issue.last
+
+ expect(new_issue.title).to eq("Testing encoding iso-8859-2 ťžščľžťťč")
+ expect(new_issue.description).to eq(expected_description.strip)
+ end
+ end
+
context 'when everything is fine' do
it_behaves_like 'a new issue request'
diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb
index 35065b74eff..05c8559e30f 100644
--- a/spec/lib/gitlab/email/reply_parser_spec.rb
+++ b/spec/lib/gitlab/email/reply_parser_spec.rb
@@ -380,5 +380,39 @@ RSpec.describe Gitlab::Email::ReplyParser, feature_category: :team_planning do
end
end
end
+
+ context 'iso-8859-2 content' do
+ let(:raw_content) do
+ <<-BODY.strip_heredoc.chomp
+ From: Jake the Dog <jake@adventuretime.ooo>
+ To: <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>
+ Subject: =?iso-8859-2?B?VGVzdGluZyBlbmNvZGluZyBpc28tODg1OS0yILu+uei1vru76A==?=
+ Date: Wed, 31 May 2023 18:43:32 +0200
+ Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+ MIME-Version: 1.0
+ Content-Type: multipart/alternative;
+ boundary="----=_NextPart_000_0001_01D993EF.CDD81EA0"
+ X-Mailer: Microsoft Outlook 16.0
+ Thread-Index: AdmT3ur1lfLfsfGgRM699GyWkjowfg==
+ Content-Language: en-us
+
+ This is a multipart message in MIME format.
+
+ ------=_NextPart_000_0001_01D993EF.CDD81EA0
+ Content-Type: text/plain;
+ charset="iso-8859-2"
+ Content-Transfer-Encoding: base64
+
+ Qm9keSBvZiBlbmNvZGluZyBpc28tODg1OS0yIHRlc3Q6ILu+uei1vru76A0KDQo=
+ BODY
+ end
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(raw_content, { trim_reply: false }))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ Body of encoding iso-8859-2 test: ťžščľžťťč\r\n\r\n
+ BODY
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
index bcd59c34ea2..c25cba704b3 100644
--- a/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
+++ b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
before do
# Disabled in spec_helper by default thus we need to enable it here.
- stub_feature_flags(use_click_house_database_for_error_tracking: true)
+ stub_feature_flags(gitlab_error_tracking: true)
end
shared_examples 'exception logging' do
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
it 'returns detailed error' do
is_expected.to have_attributes(
id: error.fingerprint.to_s,
- title: error.name,
+ title: "#{error.name}: #{error.description}",
message: error.description,
culprit: error.actor,
first_seen: error.first_seen_at.to_s,
@@ -97,10 +97,43 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
tags: { level: nil, logger: nil },
external_url: "http://localhost/#{project.full_path}/-/error_tracking/#{error.fingerprint}/details",
external_base_url: "http://localhost/#{project.full_path}",
- integrated: true
+ integrated: true,
+ frequency: [[1, 2], [3, 4]]
)
end
+ context 'with missing stats' do
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id, stats: nil) }
+
+ it 'returns empty frequency' do
+ is_expected.to have_attributes(
+ frequency: []
+ )
+ end
+ end
+
+ context 'with missing frequency' do
+ let(:empty_freq) { build(:error_tracking_open_api_error_stats, { frequency: nil }) }
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id, stats: empty_freq) }
+
+ it 'returns empty frequency' do
+ is_expected.to have_attributes(
+ frequency: []
+ )
+ end
+ end
+
+ context 'with missing frequency data' do
+ let(:empty_freq) { build(:error_tracking_open_api_error_stats, { frequency: {} }) }
+ let(:error) { build(:error_tracking_open_api_error, project_id: project.id, stats: empty_freq) }
+
+ it 'returns empty frequency' do
+ is_expected.to have_attributes(
+ frequency: []
+ )
+ end
+ end
+
it 'returns no first and last release version' do
is_expected.to have_attributes(
first_release_version: nil,
@@ -187,14 +220,15 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
expect(result_errors).to all(
have_attributes(
id: error.fingerprint.to_s,
- title: error.name,
+ title: "#{error.name}: #{error.description}",
message: error.description,
culprit: error.actor,
first_seen: error.first_seen_at,
last_seen: error.last_seen_at,
status: error.status,
count: error.event_count,
- user_count: error.approximated_user_count
+ user_count: error.approximated_user_count,
+ frequency: [[1, 2], [3, 4]]
))
end
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 33d322d0d44..3399c6dd9f4 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor, :sentry do
+RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor, :sentry, feature_category: :integrations do
describe '.call' do
let(:raven_required_options) do
{
diff --git a/spec/lib/gitlab/external_authorization/cache_spec.rb b/spec/lib/gitlab/external_authorization/cache_spec.rb
index a8e7932b82c..186bf7d7ec1 100644
--- a/spec/lib/gitlab/external_authorization/cache_spec.rb
+++ b/spec/lib/gitlab/external_authorization/cache_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Cache, :clean_gitlab_redis_cache d
def set_in_redis(key, value)
Gitlab::Redis::Cache.with do |redis|
- redis.hmset(cache_key, key, value)
+ redis.hset(cache_key, key, value)
end
end
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index 98fb154fb05..b8829cc794c 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -127,8 +127,8 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
form_builder.gitlab_ui_checkbox_component(
:view_diffs_file_by_file
) do |c|
- c.label { "Show one file at a time on merge request's Changes tab" }
- c.help_text { 'Instead of all the files changed, show only one file at a time.' }
+ c.with_label { "Show one file at a time on merge request's Changes tab" }
+ c.with_help_text { 'Instead of all the files changed, show only one file at a time.' }
end
end
@@ -208,8 +208,8 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
:access_level,
:admin
) do |c|
- c.label { "Admin" }
- c.help_text { 'Administrators have access to all groups, projects, and users and can manage all features in this installation' }
+ c.with_label { "Admin" }
+ c.with_help_text { 'Administrators have access to all groups, projects, and users and can manage all features in this installation' }
end
end
diff --git a/spec/lib/gitlab/git/conflict/parser_spec.rb b/spec/lib/gitlab/git/conflict/parser_spec.rb
index 67f288e0299..d3ee0b8d1ce 100644
--- a/spec/lib/gitlab/git/conflict/parser_spec.rb
+++ b/spec/lib/gitlab/git/conflict/parser_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe Gitlab::Git::Conflict::Parser do
.to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
- it 'raises UnmergeableFile when the file is over 200 KB' do
+ it 'raises UnmergeableFile when the file is over 200 KiB' do
expect { parse_text('a' * 204801) }
.to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 06904849ef5..b137157f2d5 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -41,8 +41,8 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
it 'gets the branch name from GitalyClient' do
- expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(:default_branch_name)
- repository.root_ref
+ expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(:default_branch_name).with(head_only: true)
+ repository.root_ref(head_only: true)
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RefService, :default_branch_name do
@@ -1454,7 +1454,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
it "returns the number of commits in the whole repository" do
options = { all: true }
- expect(repository.count_commits(options)).to eq(315)
+ expect(repository.count_commits(options)).to eq(322)
end
end
@@ -1675,6 +1675,41 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(collection).to be_a(Enumerable)
expect(collection.to_a).to be_empty
end
+
+ describe 'merge_commit_diff_mode argument' do
+ let(:gitaly_commit_client) { double('Gitlab::GitalyClient::CommitService') }
+
+ before do
+ allow(repository).to receive(:gitaly_commit_client).and_return(gitaly_commit_client)
+ allow(gitaly_commit_client).to receive(:find_changed_paths)
+ end
+
+ context 'when omitted' do
+ before do
+ repository.find_changed_paths(['sha'])
+ end
+
+ it 'defaults to nil' do
+ expect(gitaly_commit_client)
+ .to have_received(:find_changed_paths)
+ .with(['sha'], merge_commit_diff_mode: nil)
+ end
+ end
+
+ context 'when included' do
+ let(:passed_value) { 'foobar' }
+
+ before do
+ repository.find_changed_paths(['sha'], merge_commit_diff_mode: passed_value)
+ end
+
+ it 'passes the value on to the commit client' do
+ expect(gitaly_commit_client)
+ .to have_received(:find_changed_paths)
+ .with(['sha'], merge_commit_diff_mode: passed_value)
+ end
+ end
+ end
end
describe "#ls_files" do
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index 240cf6ed46f..a15c74a058d 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Tag do
+RSpec.describe Gitlab::Git::Tag, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:repository) { project.repository.raw }
@@ -17,10 +17,9 @@ RSpec.describe Gitlab::Git::Tag do
it { expect(tag.has_signature?).to be_falsey }
it { expect(tag.signature_type).to eq(:NONE) }
it { expect(tag.signature).to be_nil }
- it { expect(tag.tagger.name).to eq("Dmitriy Zaporozhets") }
- it { expect(tag.tagger.email).to eq("dmitriy.zaporozhets@gmail.com") }
- it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1393491299)) }
- it { expect(tag.tagger.timezone).to eq("+0200") }
+ it { expect(tag.user_name).to eq("Dmitriy Zaporozhets") }
+ it { expect(tag.user_email).to eq("dmitriy.zaporozhets@gmail.com") }
+ it { expect(tag.date).to eq(Time.at(1393491299).utc) }
end
describe 'signed tag' do
@@ -33,10 +32,9 @@ RSpec.describe Gitlab::Git::Tag do
it { expect(tag.has_signature?).to be_truthy }
it { expect(tag.signature_type).to eq(:X509) }
it { expect(tag.signature).not_to be_nil }
- it { expect(tag.tagger.name).to eq("Roger Meier") }
- it { expect(tag.tagger.email).to eq("r.meier@siemens.com") }
- it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1574261780)) }
- it { expect(tag.tagger.timezone).to eq("+0100") }
+ it { expect(tag.user_name).to eq("Roger Meier") }
+ it { expect(tag.user_email).to eq("r.meier@siemens.com") }
+ it { expect(tag.date).to eq(Time.at(1574261780).utc) }
end
it { expect(repository.tags.size).to be > 0 }
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 05205ab6d6a..70c4a2a71ff 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -168,25 +168,277 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
end
describe '#find_changed_paths' do
- let(:commits) { %w[1a0b36b3cdad1d2ee32457c102a8c0b7056fa863 cfe32cf61b73a0d5e9f13e774abde7ff789b1660] }
+ let(:mapped_merge_commit_diff_mode) { described_class::MERGE_COMMIT_DIFF_MODES[merge_commit_diff_mode] }
+ let(:commits) do
+ %w[
+ ade1c0b4b116209ed2a9958436b26f89085ec383
+ 594937c22df7a093888ff13af518f2b683f5f719
+ 760c58db5a6f3b64ad7e3ff6b3c4a009da7d9b33
+ 2b298117a741cdb06eb48df2c33f1390cf89f7e8
+ c41e12c387b4e0e41bfc17208252d6a6430f2fcd
+ 1ada92f78a19f27cb442a0a205f1c451a3a15432
+ ]
+ end
- it 'sends an RPC request and returns the stats' do
- request = Gitaly::FindChangedPathsRequest.new(repository: repository_message,
- commits: commits)
+ let(:requests) do
+ commits.map do |commit|
+ Gitaly::FindChangedPathsRequest::Request.new(
+ commit_request: Gitaly::FindChangedPathsRequest::Request::CommitRequest.new(commit_revision: commit)
+ )
+ end
+ end
+
+ let(:request) do
+ Gitaly::FindChangedPathsRequest.new(repository: repository_message, requests: requests, merge_commit_diff_mode: merge_commit_diff_mode)
+ end
+
+ subject { described_class.new(repository).find_changed_paths(commits, merge_commit_diff_mode: merge_commit_diff_mode).as_json }
+
+ before do
+ allow(Gitaly::FindChangedPathsRequest).to receive(:new).and_call_original
+ end
+
+ shared_examples 'includes paths different in any parent' do
+ let(:changed_paths) do
+ [
+ { path: 'files/locked/foo.lfs', status: 'ADDED' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ ].as_json
+ end
+
+ it 'returns all paths, including ones from merge commits' do
+ is_expected.to eq(changed_paths)
+ end
+ end
+
+ shared_examples 'includes paths different in all parents' do
+ let(:changed_paths) do
+ [
+ { path: 'files/locked/foo.lfs', status: 'ADDED' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ ].as_json
+ end
+
+ it 'returns only paths different in all parents' do
+ is_expected.to eq(changed_paths)
+ end
+ end
+
+ shared_examples 'uses requests format' do
+ it 'passes the revs via the requests kwarg as CommitRequest objects' do
+ subject
+ expect(Gitaly::FindChangedPathsRequest)
+ .to have_received(:new).with(
+ repository: repository_message,
+ requests: requests,
+ merge_commit_diff_mode: mapped_merge_commit_diff_mode
+ )
+ end
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in all parents'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when feature flag "merge_commit_diff_modes" is disabled' do
+ let(:mapped_merge_commit_diff_mode) { nil }
+
+ before do
+ stub_feature_flags(merge_commit_diff_modes: false)
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses requests format'
+ end
+ end
+
+ context 'when feature flag "find_changed_paths_new_format" is disabled' do
+ before do
+ stub_feature_flags(find_changed_paths_new_format: false)
+ end
+
+ shared_examples 'uses commits format' do
+ it do
+ subject
+ expect(Gitaly::FindChangedPathsRequest)
+ .to have_received(:new).with(
+ repository: repository_message,
+ commits: commits,
+ merge_commit_diff_mode: mapped_merge_commit_diff_mode
+ )
+ end
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
- changed_paths_response = Gitaly::FindChangedPathsResponse.new(
- paths: [{
- path: "app/assets/javascripts/boards/components/project_select.vue",
- status: :MODIFIED
- }])
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
- expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:find_changed_paths)
- .with(request, kind_of(Hash)).and_return([changed_paths_response])
+ include_examples 'includes paths different in any parent'
- returned_value = described_class.new(repository).find_changed_paths(commits)
- mapped_expected_value = changed_paths_response.paths.map { |path| Gitlab::Git::ChangedPath.new(status: path.status, path: path.path) }
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in all parents'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when feature flag "merge_commit_diff_modes" is disabled' do
+ let(:mapped_merge_commit_diff_mode) { nil }
+
+ before do
+ stub_feature_flags(merge_commit_diff_modes: false)
+ end
+
+ context 'when merge_commit_diff_mode is nil' do
+ let(:merge_commit_diff_mode) { nil }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :unspecified' do
+ let(:merge_commit_diff_mode) { :unspecified }
- expect(returned_value.as_json).to eq(mapped_expected_value.as_json)
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :include_merges' do
+ let(:merge_commit_diff_mode) { :include_merges }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is invalid' do
+ let(:merge_commit_diff_mode) { 'invalid' }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+
+ context 'when merge_commit_diff_mode is :all_parents' do
+ let(:merge_commit_diff_mode) { :all_parents }
+
+ include_examples 'includes paths different in any parent'
+
+ include_examples 'uses commits format'
+ end
+ end
end
end
@@ -208,6 +460,19 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
is_expected.to eq([[], nil])
end
+ context 'when recursive is "true"' do
+ let(:recursive) { true }
+
+ it 'sends a get_tree_entries message without the limit' do
+ expect_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:get_tree_entries)
+ .with(gitaly_request_with_params({ pagination_params: nil }), kind_of(Hash))
+ .and_return([])
+
+ is_expected.to eq([[], nil])
+ end
+ end
+
context 'with UTF-8 params strings' do
let(:revision) { "branch\u011F" }
let(:path) { "foo/\u011F.txt" }
@@ -241,6 +506,129 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
is_expected.to eq([[], pagination_cursor])
end
end
+
+ context 'with structured errors' do
+ context 'with ResolveTree error' do
+ before do
+ expect_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:get_tree_entries)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_raise(raised_error)
+ end
+
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INVALID_ARGUMENT,
+ "invalid revision or path",
+ Gitaly::GetTreeEntriesError.new(
+ resolve_tree: Gitaly::ResolveRevisionError.new(
+ revision: "incorrect revision"
+ )))
+ end
+
+ it 'raises an IndexError' do
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::Index::IndexError)
+ expect(error.message).to eq("invalid revision or path")
+ end
+ end
+ end
+
+ context 'with Path error' do
+ let(:status_code) { nil }
+ let(:expected_error) { nil }
+
+ let(:structured_error) do
+ new_detailed_error(
+ status_code,
+ "invalid revision or path",
+ expected_error)
+ end
+
+ shared_examples '#get_tree_entries path failure' do
+ it 'raises an IndexError' do
+ expect_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:get_tree_entries).with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_raise(structured_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::Index::IndexError)
+ expect(error.message).to eq(expected_message)
+ end
+ end
+ end
+
+ context 'with missing file' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "You must provide a file path" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "random path",
+ error_type: :ERROR_TYPE_EMPTY_PATH
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with path including traversal' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Path cannot include traversal syntax" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "foo/../bar",
+ error_type: :ERROR_TYPE_RELATIVE_PATH_ESCAPES_REPOSITORY
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with absolute path' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Only relative path is accepted" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "/bar/foo",
+ error_type: :ERROR_TYPE_ABSOLUTE_PATH
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with long path' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Path is too long" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "long/path/",
+ error_type: :ERROR_TYPE_LONG_PATH
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+
+ context 'with unkown path error' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Unknown path error" }
+ let(:expected_error) do
+ Gitaly::GetTreeEntriesError.new(
+ path: Gitaly::PathError.new(
+ path: "unkown error",
+ error_type: :ERROR_TYPE_UNSPECIFIED
+ ))
+ end
+
+ it_behaves_like '#get_tree_entries path failure'
+ end
+ end
+ end
end
describe '#commit_count' do
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 84672eb81c0..869195a92b3 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stderr without prefix' do
let(:stderr) { "something" }
let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stderr }
it_behaves_like 'failed branch creation'
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stdout without prefix' do
let(:stderr) { " \n" }
let(:stdout) { "something" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stdout }
it_behaves_like 'failed branch creation'
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain no stderr or stdout' do
let(:stderr) { " \n" }
let(:stdout) { "\n \n" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { "\n \n" }
it_behaves_like 'failed branch creation'
@@ -250,7 +250,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stderr' do
let(:stderr) { "something" }
let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stderr }
it_behaves_like 'a failed branch deletion'
@@ -259,7 +259,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stdout' do
let(:stderr) { " \n" }
let(:stdout) { "something" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stdout }
it_behaves_like 'a failed branch deletion'
@@ -377,7 +377,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stderr without prefix' do
let(:stderr) { "something" }
let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stderr }
it_behaves_like 'a failed merge'
@@ -395,7 +395,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain stdout without prefix' do
let(:stderr) { " \n" }
let(:stdout) { "something" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { stdout }
it_behaves_like 'a failed merge'
@@ -413,7 +413,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source
context 'when details contain no stderr or stdout' do
let(:stderr) { " \n" }
let(:stdout) { "\n \n" }
- let(:expected_message) { error_message }
+ let(:expected_message) { Gitlab::GitalyClient::OperationService::CUSTOM_HOOK_FALLBACK_MESSAGE }
let(:expected_raw_message) { "\n \n" }
it_behaves_like 'a failed merge'
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 7bdfa8922d3..fe04ad36e9a 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -138,101 +138,64 @@ RSpec.describe Gitlab::GitalyClient::RefService, feature_category: :gitaly do
expect_any_instance_of(Gitaly::RefService::Stub)
.to receive(:find_default_branch_name)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(name: 'foo'))
+ .and_return(double(name: 'refs/heads/main'))
- client.default_branch_name
+ response = client.default_branch_name
+
+ expect(response).to eq('main')
end
end
describe '#local_branches' do
let(:remote_name) { 'my_remote' }
- shared_examples 'common examples' do
- it 'sends a find_local_branches message' do
- target_commits = create_list(:gitaly_commit, 4)
- branches = target_commits.each_with_index.map do |gitaly_commit, i|
- Gitaly::FindLocalBranchResponse.new(
- name: "#{remote_name}/#{i}",
- commit: gitaly_commit,
- commit_author: Gitaly::FindLocalBranchCommitAuthor.new(
- name: gitaly_commit.author.name,
- email: gitaly_commit.author.email,
- date: gitaly_commit.author.date,
- timezone: gitaly_commit.author.timezone
- ),
- commit_committer: Gitaly::FindLocalBranchCommitAuthor.new(
- name: gitaly_commit.committer.name,
- email: gitaly_commit.committer.email,
- date: gitaly_commit.committer.date,
- timezone: gitaly_commit.committer.timezone
- )
- )
- end
-
- local_branches = target_commits.each_with_index.map do |gitaly_commit, i|
- Gitaly::Branch.new(name: "#{remote_name}/#{i}", target_commit: gitaly_commit)
- end
-
- response = if set_local_branches
- [
- Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[0, 2]),
- Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[2, 2])
- ]
- else
- [
- Gitaly::FindLocalBranchesResponse.new(branches: branches[0, 2]),
- Gitaly::FindLocalBranchesResponse.new(branches: branches[2, 2])
- ]
- end
-
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(response)
-
- subject = client.local_branches
+ it 'sends a find_local_branches message' do
+ target_commits = create_list(:gitaly_commit, 4)
- expect(subject.length).to be(target_commits.length)
+ local_branches = target_commits.each_with_index.map do |gitaly_commit, i|
+ Gitaly::Branch.new(name: "#{remote_name}/#{i}", target_commit: gitaly_commit)
end
- it 'parses and sends the sort parameter' do
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_params(sort_by: :UPDATED_DESC), kind_of(Hash))
- .and_return([])
+ response = [
+ Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[0, 2]),
+ Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[2, 2])
+ ]
- client.local_branches(sort_by: 'updated_desc')
- end
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(response)
- it 'translates known mismatches on sort param values' do
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
- .and_return([])
+ subject = client.local_branches
- client.local_branches(sort_by: 'name_asc')
- end
+ expect(subject.length).to be(target_commits.length)
+ end
- it 'uses default sort by name' do
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:find_local_branches)
- .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
- .and_return([])
+ it 'parses and sends the sort parameter' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_params(sort_by: :UPDATED_DESC), kind_of(Hash))
+ .and_return([])
- client.local_branches(sort_by: 'invalid')
- end
+ client.local_branches(sort_by: 'updated_desc')
end
- context 'when local_branches variable is not set' do
- let(:set_local_branches) { false }
+ it 'translates known mismatches on sort param values' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
+ .and_return([])
- it_behaves_like 'common examples'
+ client.local_branches(sort_by: 'name_asc')
end
- context 'when local_branches variable is set' do
- let(:set_local_branches) { true }
+ it 'uses default sort by name' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_local_branches)
+ .with(gitaly_request_with_params(sort_by: :NAME), kind_of(Hash))
+ .and_return([])
- it_behaves_like 'common examples'
+ client.local_branches(sort_by: 'invalid')
end
end
diff --git a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
index 6bfbfbdeddf..cbcd9b83c15 100644
--- a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_category: :importers do
- subject { described_class.new(gist_object, user.id).execute }
+ subject { described_class.new(gist_object, user.id) }
let_it_be(:user) { create(:user) }
let(:created_at) { Time.utc(2022, 1, 9, 12, 15) }
@@ -18,7 +18,8 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
first_file: gist_file,
git_pull_url: url,
created_at: created_at,
- updated_at: updated_at
+ updated_at: updated_at,
+ total_files_size: Gitlab::CurrentSettings.snippet_size_limit
)
end
@@ -36,34 +37,103 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
describe '#execute' do
context 'when success' do
+ let(:validator_result) do
+ instance_double(ServiceResponse, error?: false)
+ end
+
it 'creates expected snippet and snippet repository' do
+ expect_next_instance_of(Snippets::RepositoryValidationService) do |validator|
+ expect(validator).to receive(:execute).and_return(validator_result)
+ end
+
expect_next_instance_of(Repository) do |repository|
expect(repository).to receive(:fetch_as_mirror)
end
- expect { subject }.to change { user.snippets.count }.by(1)
+ expect { subject.execute }.to change { user.snippets.count }.by(1)
expect(user.snippets[0].attributes).to include expected_snippet_attrs
end
end
- context 'when file size limit exeeded' do
- before do
- files = [].tap { |array| 11.times { |n| array << ["file#{n}.txt", {}] } }.to_h
+ describe 'pre-import validations' do
+ context 'when file count limit exeeded' do
+ before do
+ files = [].tap { |array| 11.times { |n| array << ["file#{n}.txt", {}] } }.to_h
+
+ allow(gist_object).to receive(:files).and_return(files)
+ end
+
+ it 'validates input and returns error' do
+ expect(PersonalSnippet).not_to receive(:new)
+
+ result = subject.execute
+
+ expect(user.snippets.count).to eq(0)
+ expect(result.error?).to eq(true)
+ expect(result.errors).to match_array(['Snippet maximum file count exceeded'])
+ end
+ end
+
+ context 'when repo too big' do
+ before do
+ files = [{ "file1.txt" => {} }, { "file2.txt" => {} }]
+
+ allow(gist_object).to receive(:files).and_return(files)
+ allow(gist_object).to receive(:total_files_size).and_return(Gitlab::CurrentSettings.snippet_size_limit + 1)
+ end
+
+ it 'validates input and returns error' do
+ expect(PersonalSnippet).not_to receive(:new)
+
+ result = subject.execute
+
+ expect(result.error?).to eq(true)
+ expect(result.errors).to match_array(['Snippet repository size exceeded'])
+ end
+ end
+ end
+ describe 'post-import validations' do
+ let(:files) { { "file1.txt" => {}, "file2.txt" => {} } }
+
+ before do
allow(gist_object).to receive(:files).and_return(files)
allow_next_instance_of(Repository) do |repository|
allow(repository).to receive(:fetch_as_mirror)
- allow(repository).to receive(:empty?).and_return(false)
- allow(repository).to receive(:ls_files).and_return(files.keys)
+ end
+ allow_next_instance_of(Snippets::RepositoryValidationService) do |validator|
+ allow(validator).to receive(:execute).and_return(validator_result)
end
end
- it 'returns error' do
- result = subject
+ context 'when file count limit exeeded' do
+ let(:validator_result) do
+ instance_double(ServiceResponse, error?: true, message: 'Error: Repository files count over the limit')
+ end
- expect(user.snippets.count).to eq(0)
- expect(result.error?).to eq(true)
- expect(result.errors).to match_array(['Snippet maximum file count exceeded'])
+ it 'returns error' do
+ expect(subject).to receive(:remove_snippet_and_repository).and_call_original
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(result.errors).to match_array(['Error: Repository files count over the limit'])
+ end
+ end
+
+ context 'when repo too big' do
+ let(:validator_result) do
+ instance_double(ServiceResponse, error?: true, message: 'Error: Repository size is above the limit.')
+ end
+
+ it 'returns error' do
+ expect(subject).to receive(:remove_snippet_and_repository).and_call_original
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(result.errors).to match_array(['Error: Repository size is above the limit.'])
+ end
end
end
@@ -71,7 +141,8 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
let(:gist_file) { { file_name: '_Summary.md', file_content: nil } }
it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Content can't be blank")
+ expect { subject.execute }
+ .to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Content can't be blank")
end
end
@@ -82,7 +153,9 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
expect(repository).to receive(:remove)
end
- expect { subject }.to raise_error(Gitlab::Shell::Error)
+ expect(subject).to receive(:remove_snippet_and_repository).and_call_original
+
+ expect { subject.execute }.to raise_error(Gitlab::Shell::Error)
expect(user.snippets.count).to eq(0)
end
end
@@ -103,7 +176,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
allow_localhost: true, allow_local_network: true)
.and_raise(Gitlab::UrlBlocker::BlockedUrlError)
- expect { subject }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { subject.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
end
end
@@ -120,7 +193,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
allow_localhost: false, allow_local_network: false)
.and_raise(Gitlab::UrlBlocker::BlockedUrlError)
- expect { subject }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ expect { subject.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
end
end
end
diff --git a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
index dc9f939a19b..84d6713efdb 100644
--- a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
+++ b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader do
it 'raises expected exception' do
expect { downloader.perform }.to raise_exception(
- Gitlab::Utils::PathTraversalAttackError,
+ Gitlab::PathTraversal::PathTraversalAttackError,
'Invalid path'
)
end
@@ -56,7 +56,7 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader do
it 'raises expected exception' do
expect { downloader.perform }.to raise_exception(
Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
- 'File size 26 MB exceeds limit of 25 MB'
+ 'File size 26 MiB exceeds limit of 25 MiB'
)
end
end
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 73ba49bf4ed..0f35c7ee0dc 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -160,7 +160,8 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
new_path: file_path,
old_path: file_path,
position_type: 'text',
- line_range: nil
+ line_range: nil,
+ ignore_whitespace_change: false
})
expect(note.note)
.to eq <<~NOTE
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 0b8b1922d94..6b3d4485ea5 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
+RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter, feature_category: :importers do
let(:repository) { double(:repository) }
let(:import_state) { double(:import_state) }
let(:client) { double(:client) }
@@ -23,6 +23,7 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
let(:project) do
double(
:project,
+ id: 1,
import_url: 'foo.git',
import_source: 'foo/bar',
repository_storage: 'foo',
@@ -204,6 +205,8 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
.to receive(:fetch_as_mirror)
.with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true)
+ expect(importer).to receive(:validate_repository_size!)
+
service = double
expect(Repositories::HousekeepingService)
.to receive(:new).with(project, :gc).and_return(service)
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
index 3e76b4ae698..be202733a89 100644
--- a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -104,7 +104,8 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
old_line: nil,
old_path: 'README.md',
position_type: 'text',
- start_sha: 'start'
+ start_sha: 'start',
+ ignore_whitespace_change: false
)
end
end
@@ -122,7 +123,8 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
new_line: nil,
old_path: 'README.md',
position_type: 'text',
- start_sha: 'start'
+ start_sha: 'start',
+ ignore_whitespace_change: false
)
end
end
@@ -144,6 +146,14 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
expect(note.line_code).to eq('8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_2_2')
end
+
+ context 'when comment on file' do
+ it 'generates line code for first line' do
+ note = described_class.new(diff_hunk: '', file_path: 'README.md', subject_type: 'file')
+
+ expect(note.line_code).to eq('8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_1_1')
+ end
+ end
end
describe '#note and #contains_suggestion?' do
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 2ac2fc1fd4b..4345df1b018 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
describe Gitlab::GlRepository::DESIGN do
it_behaves_like 'a repo type' do
let(:expected_repository) { project.design_repository }
- let(:expected_container) { project.design_management_repository }
+ let(:expected_container) { expected_repository.container }
let(:expected_id) { expected_container.id }
let(:expected_identifier) { "design-#{expected_id}" }
let(:expected_suffix) { '.design' }
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index 6e8997d51c3..1135cfc22ac 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -6,10 +6,6 @@ RSpec.describe Gitlab::GonHelper do
let(:helper) do
Class.new do
include Gitlab::GonHelper
-
- def current_user
- nil
- end
end.new
end
@@ -18,6 +14,7 @@ RSpec.describe Gitlab::GonHelper do
let(:https) { true }
before do
+ allow(helper).to receive(:current_user).and_return(nil)
allow(helper).to receive(:gon).and_return(gon)
stub_config_setting(https: https)
end
@@ -40,6 +37,24 @@ RSpec.describe Gitlab::GonHelper do
end
end
+ it 'sets no GitLab version' do
+ expect(gon).not_to receive(:version=)
+
+ helper.add_gon_variables
+ end
+
+ context 'when user is logged in' do
+ before do
+ allow(helper).to receive(:current_user).and_return(build_stubbed(:user))
+ end
+
+ it 'sets GitLab version' do
+ expect(gon).to receive(:version=).with(Gitlab::VERSION)
+
+ helper.add_gon_variables
+ end
+ end
+
context 'when sentry is configured' do
let(:clientside_dsn) { 'https://xxx@sentry.example.com/1' }
let(:environment) { 'staging' }
diff --git a/spec/lib/gitlab/graphql/generic_tracing_spec.rb b/spec/lib/gitlab/graphql/generic_tracing_spec.rb
index cd116225ecd..04fe7760f62 100644
--- a/spec/lib/gitlab/graphql/generic_tracing_spec.rb
+++ b/spec/lib/gitlab/graphql/generic_tracing_spec.rb
@@ -2,25 +2,47 @@
require 'spec_helper'
-RSpec.describe Gitlab::Graphql::GenericTracing do
+RSpec.describe Gitlab::Graphql::GenericTracing, feature_category: :application_performance do
let(:graphql_duration_seconds_histogram) { double('Gitlab::Metrics::NullMetric') }
- it 'updates graphql histogram with expected labels' do
- query = 'query { users { id } }'
- tracer = described_class.new
+ context 'when graphql_generic_tracing_metrics_deactivate is disabled' do
+ before do
+ stub_feature_flags(graphql_generic_tracing_metrics_deactivate: false)
+ end
+
+ it 'updates graphql histogram with expected labels' do
+ query = 'query { users { id } }'
+ tracer = described_class.new
+
+ allow(tracer)
+ .to receive(:graphql_duration_seconds)
+ .and_return(graphql_duration_seconds_histogram)
+
+ expect_metric('graphql.lex', 'lex')
+ expect_metric('graphql.parse', 'parse')
+ expect_metric('graphql.validate', 'validate')
+ expect_metric('graphql.analyze', 'analyze_multiplex')
+ expect_metric('graphql.execute', 'execute_query_lazy')
+ expect_metric('graphql.execute', 'execute_multiplex')
- allow(tracer)
- .to receive(:graphql_duration_seconds)
- .and_return(graphql_duration_seconds_histogram)
+ GitlabSchema.execute(query, context: { tracers: [tracer] })
+ end
+ end
+
+ context 'when graphql_generic_tracing_metrics_deactivate is enabled' do
+    it 'does not update graphql histogram with expected labels' do
+ query = 'query { users { id } }'
+ tracer = described_class.new
- expect_metric('graphql.lex', 'lex')
- expect_metric('graphql.parse', 'parse')
- expect_metric('graphql.validate', 'validate')
- expect_metric('graphql.analyze', 'analyze_multiplex')
- expect_metric('graphql.execute', 'execute_query_lazy')
- expect_metric('graphql.execute', 'execute_multiplex')
+ allow(tracer)
+ .to receive(:graphql_duration_seconds)
+ .and_return(graphql_duration_seconds_histogram)
- GitlabSchema.execute(query, context: { tracers: [tracer] })
+ GitlabSchema.execute(query, context: { tracers: [tracer] })
+
+ expect(graphql_duration_seconds_histogram)
+ .not_to receive(:observe)
+ end
end
context "when labkit tracing is enabled" do
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index ec96a069b8f..1206a1c9131 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GroupSearchResults do
+RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do
# group creation calls GroupFinder, so need to create the group
# before so expect(GroupsFinder) check works
let_it_be(:group) { create(:group) }
@@ -46,6 +46,19 @@ RSpec.describe Gitlab::GroupSearchResults do
include_examples 'search results filtered by state'
end
+ describe '#projects' do
+ let(:scope) { 'projects' }
+ let(:query) { 'Test' }
+
+ describe 'filtering' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:unarchived_project) { create(:project, :public, group: group, name: 'Test1') }
+ let_it_be(:archived_project) { create(:project, :archived, :public, group: group, name: 'Test2') }
+
+ it_behaves_like 'search results filtered by archived'
+ end
+ end
+
describe 'user search' do
subject(:objects) { results.objects('users') }
diff --git a/spec/lib/gitlab/hotlinking_detector_spec.rb b/spec/lib/gitlab/hotlinking_detector_spec.rb
index 536d744c197..809c4a3c244 100644
--- a/spec/lib/gitlab/hotlinking_detector_spec.rb
+++ b/spec/lib/gitlab/hotlinking_detector_spec.rb
@@ -39,6 +39,9 @@ RSpec.describe Gitlab::HotlinkingDetector do
true | "text/css,*/*;q=0.1"
true | "text/css"
true | "text/css,*/*;q=0.1"
+
+ # Invalid MIME definition
+ true | "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2"
end
with_them do
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 57e4b4fc74b..133cd3b2f49 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -364,4 +364,77 @@ RSpec.describe Gitlab::HTTP do
end
end
end
+
+ describe 'silent mode', feature_category: :geo_replication do
+ before do
+ stub_full_request("http://example.org", method: :any)
+ stub_application_setting(silent_mode_enabled: silent_mode)
+ end
+
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it 'allows GET requests' do
+ expect { described_class.get('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows HEAD requests' do
+ expect { described_class.head('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows OPTIONS requests' do
+ expect { described_class.options('http://example.org') }.not_to raise_error
+ end
+
+ it 'blocks POST requests' do
+ expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'blocks PUT requests' do
+ expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'blocks DELETE requests' do
+ expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+
+ it 'logs blocked requests' do
+ expect(::Gitlab::AppJsonLogger).to receive(:info).with(
+ message: "Outbound HTTP request blocked",
+ outbound_http_request_method: 'Net::HTTP::Post',
+ silent_mode_enabled: true
+ )
+
+ expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
+ end
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it 'allows GET requests' do
+ expect { described_class.get('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows HEAD requests' do
+ expect { described_class.head('http://example.org') }.not_to raise_error
+ end
+
+ it 'allows OPTIONS requests' do
+ expect { described_class.options('http://example.org') }.not_to raise_error
+ end
+
+      it 'allows POST requests' do
+ expect { described_class.post('http://example.org') }.not_to raise_error
+ end
+
+      it 'allows PUT requests' do
+ expect { described_class.put('http://example.org') }.not_to raise_error
+ end
+
+      it 'allows DELETE requests' do
+ expect { described_class.delete('http://example.org') }.not_to raise_error
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import/errors_spec.rb b/spec/lib/gitlab/import/errors_spec.rb
index f89cb36bbb4..3b45af0618b 100644
--- a/spec/lib/gitlab/import/errors_spec.rb
+++ b/spec/lib/gitlab/import/errors_spec.rb
@@ -40,7 +40,6 @@ RSpec.describe Gitlab::Import::Errors, feature_category: :importers do
"Author can't be blank",
"Project does not match noteable project",
"User can't be blank",
- "Awardable can't be blank",
"Name is not a valid emoji name"
)
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 34f9948b9dc..f6bdbc86cc5 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -94,6 +94,7 @@ notes:
- diff_note_positions
- review
- note_metadata
+- user_agent_detail
note_metadata:
- note
- email_participant
@@ -112,6 +113,7 @@ commit_notes:
- diff_note_positions
- review
- note_metadata
+- user_agent_detail
label_links:
- target
- label
@@ -239,6 +241,7 @@ merge_requests:
- compliance_violations
- created_environments
- predictions
+- user_agent_detail
external_pull_requests:
- project
merge_request_diff:
@@ -421,6 +424,7 @@ builds:
- dast_site_profile
- dast_scanner_profiles_build
- dast_scanner_profile
+- job_annotations
bridges:
- user
- pipeline
@@ -581,6 +585,7 @@ project:
- custom_issue_tracker_integration
- bugzilla_integration
- ewm_integration
+- clickup_integration
- external_wiki_integration
- mock_ci_integration
- mock_monitoring_integration
@@ -724,6 +729,7 @@ project:
- rpm_repository_files
- npm_metadata_caches
- packages_cleanup_policy
+- dependency_proxy_packages_setting
- alerting_setting
- project_setting
- webide_pipelines
@@ -760,6 +766,7 @@ project:
- freeze_periods
- pumble_integration
- webex_teams_integration
+- telegram_integration
- build_report_results
- vulnerability_statistic
- vulnerability_historical_statistics
@@ -798,6 +805,8 @@ project:
- analytics_dashboards_configuration_project
- analytics_dashboards_pointer
- design_management_repository
+- design_management_repository_state
+- compliance_standards_adherence
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index a6afd0a36ec..9766d5d6d59 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -177,30 +177,146 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer, feature: :subgroups, f
end
context 'group visibility levels' do
- let(:user) { create(:user) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ context 'when the @top_level_group is the destination_group' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
+ context "when visibility level is #{visibility_level}" do
+ let(:group) { create(:group, visibility_level) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+
+ before do
+ setup_import_export_config(filepath)
+ group_tree_restorer.restore
+ end
- before do
- setup_import_export_config(filepath)
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ end
+ end
+ end
- group_tree_restorer.restore
+ include_examples 'with visibility level', :public, [20, 10, 0]
+ include_examples 'with visibility level', :private, [0, 0, 0]
+ include_examples 'with visibility level', :internal, [10, 10, 0]
end
- shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
- context "when visibility level is #{visibility_level}" do
- let(:group) { create(:group, visibility_level) }
- let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+ context 'when the destination_group is the @top_level_group.parent' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+
+ shared_examples 'with visibility level' do |visibility_level, expected_visibilities, group_visibility|
+ context "when source level is #{visibility_level}" do
+ let(:parent) { create(:group, visibility_level) }
+ let(:group) { create(:group, visibility_level, parent: parent) }
+ let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
- it "imports all subgroups as #{visibility_level}" do
- expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ it "imports all subgroups as #{visibility_level}" do
+ expect(group.visibility_level).to eq(group_visibility)
+ expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
+ end
end
end
+
+ include_examples 'with visibility level', :public, [20, 10, 0], 20
+ include_examples 'with visibility level', :private, [0, 0, 0], 0
+ include_examples 'with visibility level', :internal, [10, 10, 0], 10
+ end
+
+ context 'when the visibility level is restricted' do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:group) { create(:group, :internal) }
+ let(:filepath) { "group_exports/visibility_levels/internal" }
+
+ before do
+ setup_import_export_config(filepath)
+ Gitlab::CurrentSettings.restricted_visibility_levels = [10]
+ group_tree_restorer.restore
+ end
+
+ after do
+ Gitlab::CurrentSettings.restricted_visibility_levels = []
+ end
+
+ it 'updates the visibility_level' do
+ expect(group.children.map(&:visibility_level)).to match_array([0, 0, 0])
+ end
end
+ end
+
+ context 'when there are nested subgroups' do
+ let(:filepath) { "group_exports/visibility_levels/nested_subgroups" }
- include_examples 'with visibility level', :public, [20, 10, 0]
- include_examples 'with visibility level', :private, [0, 0, 0]
- include_examples 'with visibility level', :internal, [10, 10, 0]
+ context "when destination level is :public" do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:parent) { create(:group, :public) }
+ let(:group) { create(:group, :public, parent: parent) }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
+
+ it "imports all subgroups with original visibility_level" do
+ expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ expect(group.descendants.map(&:visibility_level))
+ .to match_array([0, 0, 0, 10, 10, 10, 20, 20])
+ end
+ end
+
+ context "when destination level is :internal" do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:parent) { create(:group, :internal) }
+ let(:group) { create(:group, :internal, parent: parent) }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
+
+ it "imports non-public subgroups with original level and public subgroups as internal" do
+ expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ expect(group.descendants.map(&:visibility_level))
+ .to match_array([0, 0, 0, 10, 10, 10, 10, 10])
+ end
+ end
+
+ context "when destination level is :private" do
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(group) }
+ let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }
+ let(:parent) { create(:group, :private) }
+ let(:group) { create(:group, :private, parent: parent) }
+
+ before do
+ setup_import_export_config(filepath)
+ parent.add_maintainer(user)
+ group_tree_restorer.restore
+ end
+
+ it "imports all subgroups as private" do
+ expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ expect(group.descendants.map(&:visibility_level))
+ .to match_array([0, 0, 0, 0, 0, 0, 0, 0])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index d8a4230e5da..1d3fc764b50 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -7,7 +7,7 @@ require 'spec_helper'
# Fixture JSONs we use for testing Import such as
# `spec/fixtures/lib/gitlab/import_export/complex/project.json`
# should include these relations being non-empty.
-RSpec.describe 'Test coverage of the Project Import' do
+RSpec.describe 'Test coverage of the Project Import', feature_category: :importers do
include ConfigurationHelper
# `muted_relations` is a technical debt.
@@ -18,7 +18,6 @@ RSpec.describe 'Test coverage of the Project Import' do
let(:muted_relations) do
%w[
project.milestones.events.push_event_payload
- project.issues.events
project.issues.events.push_event_payload
project.issues.notes.events
project.issues.notes.events.push_event_payload
@@ -53,19 +52,23 @@ RSpec.describe 'Test coverage of the Project Import' do
project.boards.lists.label.priorities
project.service_desk_setting
project.security_setting
+ project.push_rule
+ project.approval_rules
+ project.approval_rules.approval_project_rules_protected_branches
+ project.approval_rules.approval_project_rules_users
].freeze
end
- # A list of JSON fixture files we use to test Import.
- # Most of the relations are present in `complex/project.json`
+ # A list of project tree fixture files we use to test Import.
+ # Most of the relations are present in `complex/tree`
# which is our main fixture.
- let(:project_json_fixtures) do
+ let(:project_tree_fixtures) do
[
- 'spec/fixtures/lib/gitlab/import_export/complex/project.json',
- 'spec/fixtures/lib/gitlab/import_export/group/project.json',
- 'spec/fixtures/lib/gitlab/import_export/light/project.json',
- 'spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json',
- 'spec/fixtures/lib/gitlab/import_export/designs/project.json'
+ 'spec/fixtures/lib/gitlab/import_export/complex/tree',
+ 'spec/fixtures/lib/gitlab/import_export/group/tree',
+ 'spec/fixtures/lib/gitlab/import_export/light/tree',
+ 'spec/fixtures/lib/gitlab/import_export/milestone-iid/tree',
+ 'spec/fixtures/lib/gitlab/import_export/designs/tree'
].freeze
end
@@ -82,16 +85,30 @@ RSpec.describe 'Test coverage of the Project Import' do
end
def tested_relations
- project_json_fixtures.flat_map(&method(:relations_from_json)).to_set
+ project_tree_fixtures.flat_map(&method(:relations_from_tree)).to_set
end
- def relations_from_json(json_file)
- json = Gitlab::Json.parse(File.read(json_file))
+ def relations_from_tree(json_tree_path)
+ json = convert_tree_to_json(json_tree_path)
[].tap { |res| gather_relations({ project: json }, res, []) }
.map { |relation_names| relation_names.join('.') }
end
+ def convert_tree_to_json(json_tree_path)
+ json = Gitlab::Json.parse(File.read(File.join(json_tree_path, 'project.json')))
+
+ Dir["#{json_tree_path}/project/*.ndjson"].each do |ndjson|
+ relation_name = File.basename(ndjson, '.ndjson')
+ json[relation_name] = []
+ File.foreach(ndjson) do |line|
+ json[relation_name] << Gitlab::Json.parse(line)
+ end
+ end
+
+ json
+ end
+
def gather_relations(item, res, path)
case item
when Hash
@@ -112,7 +129,7 @@ RSpec.describe 'Test coverage of the Project Import' do
These relations seem to be added recently and
they expected to be covered in our Import specs: #{not_tested_relations}.
- To do that, expand one of the files listed in `project_json_fixtures`
+ To do that, expand one of the files listed in `project_tree_fixtures`
(or expand the list if you consider adding a new fixture file).
After that, add a new spec into
diff --git a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
deleted file mode 100644
index c2c50751c3f..00000000000
--- a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::ImportExport::LegacyRelationTreeSaver do
- let(:exportable) { create(:group) }
- let(:relation_tree_saver) { described_class.new }
- let(:tree) { {} }
-
- describe '#serialize' do
- let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
-
- it 'uses FastHashSerializer' do
- expect(Gitlab::ImportExport::FastHashSerializer)
- .to receive(:new)
- .with(exportable, tree)
- .and_return(serializer)
-
- expect(serializer).to receive(:execute)
-
- relation_tree_saver.serialize(exportable, tree)
- end
- end
-end
diff --git a/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb b/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
index 6e5be0b2829..cb8ac088493 100644
--- a/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
+++ b/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::ImportExport::RecursiveMergeFolders do
Dir.mktmpdir do |tmpdir|
expect do
described_class.merge("#{tmpdir}/../", tmpdir)
- end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
end
end
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::ImportExport::RecursiveMergeFolders do
Dir.mktmpdir do |tmpdir|
expect do
described_class.merge(tmpdir, "#{tmpdir}/../")
- end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError)
end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index faf345e8f78..abdd8741377 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -704,6 +704,7 @@ ProjectFeature:
- releases_access_level
- monitor_access_level
- infrastructure_access_level
+- model_experiments_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -932,11 +933,6 @@ DesignManagement::Version:
- created_at
- sha
- author_id
-DesignManagement::Repository:
-- id
-- project_id
-- created_at
-- updated_at
ZoomMeeting:
- id
- project_id
diff --git a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
index 892b8e69124..ddb5245f825 100644
--- a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
require 'support/helpers/rails_helpers'
require 'rspec-parameterized'
-RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
+RSpec.describe Gitlab::Instrumentation::RedisClusterValidator, feature_category: :scalability do
include RailsHelpers
describe '.validate' do
@@ -90,7 +90,7 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
described_class.allow_cross_slot_commands do
described_class.validate([[:mget, 'foo', 'bar']])
end
- ).to eq({ valid: true, key_count: 2, command_name: 'MGET', allowed: true })
+ ).to eq({ valid: false, key_count: 2, command_name: 'MGET', allowed: true })
end
it 'allows nested invocation' do
@@ -102,7 +102,7 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
described_class.validate([[:mget, 'foo', 'bar']])
end
- ).to eq({ valid: true, key_count: 2, command_name: 'MGET', allowed: true })
+ ).to eq({ valid: false, key_count: 2, command_name: 'MGET', allowed: true })
end
end
end
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index be6586ca610..f3c240317c8 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'rspec-parameterized'
require 'support/helpers/rails_helpers'
-RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_shared_state, :request_store do
+RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_shared_state, :request_store, feature_category: :scalability do
using RSpec::Parameterized::TableSyntax
describe 'read and write' do
@@ -115,6 +115,15 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
end
end
+ it 'does not count allowed non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ Gitlab::Redis::SharedState.with { |redis| redis.call(:get, 'bar') }
+ end
+ end
+
it 'skips count for non-cross-slot requests' do
expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
diff --git a/spec/lib/gitlab/instrumentation/redis_spec.rb b/spec/lib/gitlab/instrumentation/redis_spec.rb
index 3e02eadba4b..1b7774bc229 100644
--- a/spec/lib/gitlab/instrumentation/redis_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_spec.rb
@@ -35,13 +35,13 @@ RSpec.describe Gitlab::Instrumentation::Redis do
# will be an extra SELECT command to choose the right database. We
# don't want to make the spec less precise, so we force that to
# happen (if needed) first, then clear the counts.
- Gitlab::Redis::Cache.with { |redis| redis.info }
+ Gitlab::Redis::Sessions.with { |redis| redis.info }
RequestStore.clear!
stub_rails_env('staging') # to avoid raising CrossSlotError
- Gitlab::Redis::Cache.with { |redis| redis.mset('cache-test', 321, 'cache-test-2', 321) }
+ Gitlab::Redis::Sessions.with { |redis| redis.mset('cache-test', 321, 'cache-test-2', 321) }
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- Gitlab::Redis::Cache.with { |redis| redis.mget('cache-test', 'cache-test-2') }
+ Gitlab::Redis::Sessions.with { |redis| redis.mget('cache-test', 'cache-test-2') }
end
Gitlab::Redis::SharedState.with { |redis| redis.set('shared-state-test', 123) }
end
@@ -56,13 +56,13 @@ RSpec.describe Gitlab::Instrumentation::Redis do
redis_read_bytes: be >= 0,
redis_write_bytes: be >= 0,
- # Cache results
- redis_cache_calls: 2,
- redis_cache_cross_slot_calls: 1,
- redis_cache_allowed_cross_slot_calls: 1,
- redis_cache_duration_s: be >= 0,
- redis_cache_read_bytes: be >= 0,
- redis_cache_write_bytes: be >= 0,
+      # Sessions results
+ redis_sessions_calls: 2,
+ redis_sessions_cross_slot_calls: 1,
+ redis_sessions_allowed_cross_slot_calls: 1,
+ redis_sessions_duration_s: be >= 0,
+ redis_sessions_read_bytes: be >= 0,
+ redis_sessions_write_bytes: be >= 0,
# Shared state results
redis_shared_state_calls: 1,
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 8a88328e0c1..698c8a37d48 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -41,9 +41,9 @@ RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cac
context 'when Redis calls are made' do
it 'adds Redis data and omits Gitaly data' do
stub_rails_env('staging') # to avoid raising CrossSlotError
- Gitlab::Redis::Cache.with { |redis| redis.mset('test-cache', 123, 'test-cache2', 123) }
+ Gitlab::Redis::Sessions.with { |redis| redis.mset('test-cache', 123, 'test-cache2', 123) }
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- Gitlab::Redis::Cache.with { |redis| redis.mget('cache-test', 'cache-test-2') }
+ Gitlab::Redis::Sessions.with { |redis| redis.mget('cache-test', 'cache-test-2') }
end
Gitlab::Redis::Queues.with { |redis| redis.set('test-queues', 321) }
@@ -63,13 +63,13 @@ RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cac
expect(payload[:redis_queues_read_bytes]).to be >= 0
expect(payload[:redis_queues_write_bytes]).to be >= 0
- # Cache payload
- expect(payload[:redis_cache_calls]).to eq(2)
- expect(payload[:redis_cache_cross_slot_calls]).to eq(1)
- expect(payload[:redis_cache_allowed_cross_slot_calls]).to eq(1)
- expect(payload[:redis_cache_duration_s]).to be >= 0
- expect(payload[:redis_cache_read_bytes]).to be >= 0
- expect(payload[:redis_cache_write_bytes]).to be >= 0
+ # Sessions payload
+ expect(payload[:redis_sessions_calls]).to eq(2)
+ expect(payload[:redis_sessions_cross_slot_calls]).to eq(1)
+ expect(payload[:redis_sessions_allowed_cross_slot_calls]).to eq(1)
+ expect(payload[:redis_sessions_duration_s]).to be >= 0
+ expect(payload[:redis_sessions_read_bytes]).to be >= 0
+ expect(payload[:redis_sessions_write_bytes]).to be >= 0
# Gitaly
expect(payload[:gitaly_calls]).to be_nil
diff --git a/spec/lib/gitlab/internal_events_spec.rb b/spec/lib/gitlab/internal_events_spec.rb
new file mode 100644
index 00000000000..f23979fc56a
--- /dev/null
+++ b/spec/lib/gitlab/internal_events_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_analytics do
+ include TrackingHelpers
+ include SnowplowHelpers
+
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_snowplow)
+ allow(fake_snowplow).to receive(:event)
+ end
+
+ def expect_redis_hll_tracking(event_name)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
+ .with(event_name, anything)
+ end
+
+ def expect_snowplow_tracking(event_name)
+ service_ping_context = Gitlab::Tracking::ServicePingContext
+ .new(data_source: :redis_hll, event: event_name)
+ .to_context
+ .to_json
+
+ expect(SnowplowTracker::SelfDescribingJson).to have_received(:new)
+ .with(service_ping_context[:schema], service_ping_context[:data]).at_least(:once)
+
+ # Add test for creation of both contexts
+ contexts = [instance_of(SnowplowTracker::SelfDescribingJson), instance_of(SnowplowTracker::SelfDescribingJson)]
+
+ expect(fake_snowplow).to have_received(:event)
+ .with('InternalEventTracking', event_name, context: contexts)
+ end
+
+ let_it_be(:user) { build(:user) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:namespace) { project.namespace }
+
+ let(:fake_snowplow) { instance_double(Gitlab::Tracking::Destinations::Snowplow) }
+ let(:event_name) { 'g_edit_by_web_ide' }
+
+ it 'updates both RedisHLL and Snowplow', :aggregate_failures do
+ params = { user_id: user.id, project_id: project.id, namespace_id: namespace.id }
+ described_class.track_event(event_name, **params)
+
+ expect_redis_hll_tracking(event_name)
+ expect_snowplow_tracking(event_name) # Add test for arguments
+ end
+
+ it 'rescues error' do
+ params = { user_id: user.id, project_id: project.id, namespace_id: namespace.id }
+ error = StandardError.new("something went wrong")
+ allow(fake_snowplow).to receive(:event).and_raise(error)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ .with(
+ error,
+ snowplow_category: 'InternalEventTracking',
+ snowplow_action: event_name
+ )
+
+ expect { described_class.track_event(event_name, **params) }.not_to raise_error
+ end
+end
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index c0c1a28b9ff..64a5758d152 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::JiraImport do
context 'when Jira connection is not valid' do
before do
WebMock.stub_request(:get, 'https://jira.example.com/rest/api/2/serverInfo')
- .to_raise(JIRA::HTTPError.new(double(message: 'Some failure.')))
+ .to_raise(JIRA::HTTPError.new(double(message: 'Some failure.', code: '400')))
end
it_behaves_like 'raise Jira import error', 'Unable to connect to the Jira instance. Please check your Jira integration configuration.'
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
deleted file mode 100644
index f4f6624bae9..00000000000
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ /dev/null
@@ -1,551 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/RedundantFetchBlock
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::JsonCache do
- let_it_be(:broadcast_message) { create(:broadcast_message) }
-
- let(:backend) { double('backend').as_null_object }
- let(:namespace) { 'geo' }
- let(:key) { 'foo' }
- let(:expanded_key) { "#{namespace}:#{key}:#{Gitlab.revision}" }
-
- subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
-
- describe '#active?' do
- context 'when backend respond to active? method' do
- it 'delegates to the underlying cache implementation' do
- backend = double('backend', active?: false)
-
- cache = described_class.new(namespace: namespace, backend: backend)
-
- expect(cache.active?).to eq(false)
- end
- end
-
- context 'when backend does not respond to active? method' do
- it 'returns true' do
- backend = double('backend')
-
- cache = described_class.new(namespace: namespace, backend: backend)
-
- expect(cache.active?).to eq(true)
- end
- end
- end
-
- describe '#cache_key' do
- using RSpec::Parameterized::TableSyntax
-
- where(:namespace, :cache_key_strategy, :expanded_key) do
- nil | :revision | "#{key}:#{Gitlab.revision}"
- nil | :version | "#{key}:#{Gitlab::VERSION}:#{Rails.version}"
- namespace | :revision | "#{namespace}:#{key}:#{Gitlab.revision}"
- namespace | :version | "#{namespace}:#{key}:#{Gitlab::VERSION}:#{Rails.version}"
- end
-
- with_them do
- let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: cache_key_strategy) }
-
- subject { cache.cache_key(key) }
-
- it { is_expected.to eq expanded_key }
- end
-
- context 'when cache_key_strategy is unknown' do
- let(:cache) { described_class.new(namespace: namespace, cache_key_strategy: 'unknown') }
-
- it 'raises KeyError' do
- expect { cache.cache_key('key') }.to raise_error(KeyError)
- end
- end
- end
-
- describe '#namespace' do
- it 'defaults to nil' do
- cache = described_class.new
- expect(cache.namespace).to be_nil
- end
- end
-
- describe '#strategy_key_component' do
- subject { cache.strategy_key_component }
-
- it 'defaults to Gitlab.revision' do
- expect(described_class.new.strategy_key_component).to eq Gitlab.revision
- end
-
- context 'when cache_key_strategy is :revision' do
- let(:cache) { described_class.new(cache_key_strategy: :revision) }
-
- it { is_expected.to eq Gitlab.revision }
- end
-
- context 'when cache_key_strategy is :version' do
- let(:cache) { described_class.new(cache_key_strategy: :version) }
-
- it { is_expected.to eq [Gitlab::VERSION, Rails.version] }
- end
-
- context 'when cache_key_strategy is invalid' do
- let(:cache) { described_class.new(cache_key_strategy: 'unknown') }
-
- it 'raises KeyError' do
- expect { subject }.to raise_error(KeyError)
- end
- end
- end
-
- describe '#expire' do
- it 'expires the given key from the cache' do
- cache.expire(key)
-
- expect(backend).to have_received(:delete).with(expanded_key)
- end
- end
-
- describe '#read' do
- it 'reads the given key from the cache' do
- cache.read(key)
-
- expect(backend).to have_received(:read).with(expanded_key)
- end
-
- it 'returns the cached value when there is data in the cache with the given key' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return("true")
-
- expect(cache.read(key)).to eq(true)
- end
-
- it 'returns nil when there is no data in the cache with the given key' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(nil)
-
- expect(Gitlab::Json).not_to receive(:parse)
- expect(cache.read(key)).to be_nil
- end
-
- context 'when the cached value is true' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(true)
-
- expect(Gitlab::Json).to receive(:parse).with("true").and_call_original
- expect(cache.read(key, BroadcastMessage)).to eq(true)
- end
- end
-
- context 'when the cached value is false' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(false)
-
- expect(Gitlab::Json).to receive(:parse).with("false").and_call_original
- expect(cache.read(key, BroadcastMessage)).to eq(false)
- end
- end
-
- context 'when the cached value is a JSON true value' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return("true")
-
- expect(cache.read(key, BroadcastMessage)).to eq(true)
- end
- end
-
- context 'when the cached value is a JSON false value' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return("false")
-
- expect(cache.read(key, BroadcastMessage)).to eq(false)
- end
- end
-
- context 'when the cached value is a hash' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.to_json)
-
- expect(cache.read(key, BroadcastMessage)).to eq(broadcast_message)
- end
-
- it 'returns nil when klass is nil' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.to_json)
-
- expect(cache.read(key)).to be_nil
- end
-
- it 'gracefully handles bad cached entry' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{')
-
- expect(cache.read(key, BroadcastMessage)).to be_nil
- end
-
- it 'gracefully handles an empty hash' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{}')
-
- expect(cache.read(key, BroadcastMessage)).to be_a(BroadcastMessage)
- end
-
- it 'gracefully handles unknown attributes' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.merge(unknown_attribute: 1).to_json)
-
- expect(cache.read(key, BroadcastMessage)).to be_nil
- end
-
- it 'gracefully handles excluded fields from attributes during serialization' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.except("message_html").to_json)
-
- result = cache.read(key, BroadcastMessage)
-
- BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
- expect(result.public_send(field)).to be_nil
- end
- end
- end
-
- context 'when the cached value is an array' do
- it 'parses the cached value' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return([broadcast_message].to_json)
-
- expect(cache.read(key, BroadcastMessage)).to eq([broadcast_message])
- end
-
- it 'returns an empty array when klass is nil' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return([broadcast_message].to_json)
-
- expect(cache.read(key)).to eq([])
- end
-
- it 'gracefully handles bad cached entry' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('[')
-
- expect(cache.read(key, BroadcastMessage)).to be_nil
- end
-
- it 'gracefully handles an empty array' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('[]')
-
- expect(cache.read(key, BroadcastMessage)).to eq([])
- end
-
- it 'gracefully handles unknown attributes' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return([{ unknown_attribute: 1 }, broadcast_message.attributes].to_json)
-
- expect(cache.read(key, BroadcastMessage)).to eq([broadcast_message])
- end
- end
- end
-
- describe '#write' do
- it 'writes value to the cache with the given key' do
- cache.write(key, true)
-
- expect(backend).to have_received(:write).with(expanded_key, "true", nil)
- end
-
- it 'writes a string containing a JSON representation of the value to the cache' do
- cache.write(key, broadcast_message)
-
- expect(backend).to have_received(:write)
- .with(expanded_key, broadcast_message.to_json, nil)
- end
-
- it 'passes options the underlying cache implementation' do
- cache.write(key, true, expires_in: 15.seconds)
-
- expect(backend).to have_received(:write)
- .with(expanded_key, "true", expires_in: 15.seconds)
- end
-
- it 'passes options the underlying cache implementation when options is empty' do
- cache.write(key, true, {})
-
- expect(backend).to have_received(:write)
- .with(expanded_key, "true", {})
- end
-
- it 'passes options the underlying cache implementation when options is nil' do
- cache.write(key, true, nil)
-
- expect(backend).to have_received(:write)
- .with(expanded_key, "true", nil)
- end
- end
-
- describe '#fetch', :use_clean_rails_memory_store_caching do
- let(:backend) { Rails.cache }
-
- it 'requires a block' do
- expect { cache.fetch(key) }.to raise_error(LocalJumpError)
- end
-
- it 'passes options the underlying cache implementation' do
- expect(backend).to receive(:write)
- .with(expanded_key, "true", { expires_in: 15.seconds })
-
- cache.fetch(key, { expires_in: 15.seconds }) { true }
- end
-
- context 'when the given key does not exist in the cache' do
- context 'when the result of the block is truthy' do
- it 'returns the result of the block' do
- result = cache.fetch(key) { true }
-
- expect(result).to eq(true)
- end
-
- it 'caches the value' do
- expect(backend).to receive(:write).with(expanded_key, "true", {})
-
- cache.fetch(key) { true }
- end
- end
-
- context 'when the result of the block is false' do
- it 'returns the result of the block' do
- result = cache.fetch(key) { false }
-
- expect(result).to eq(false)
- end
-
- it 'caches the value' do
- expect(backend).to receive(:write).with(expanded_key, "false", {})
-
- cache.fetch(key) { false }
- end
- end
-
- context 'when the result of the block is nil' do
- it 'returns the result of the block' do
- result = cache.fetch(key) { nil }
-
- expect(result).to eq(nil)
- end
-
- it 'caches the value' do
- expect(backend).to receive(:write).with(expanded_key, "null", {})
-
- cache.fetch(key) { nil }
- end
- end
- end
-
- context 'when the given key exists in the cache' do
- context 'when the cached value is a hash' do
- before do
- backend.write(expanded_key, broadcast_message.to_json)
- end
-
- it 'parses the cached value' do
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result).to eq(broadcast_message)
- end
-
- it 'decodes enums correctly' do
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result.broadcast_type).to eq(broadcast_message.broadcast_type)
- end
-
- context 'when the cached value is an instance of ActiveRecord::Base' do
- it 'returns a persisted record when id is set' do
- backend.write(expanded_key, broadcast_message.to_json)
-
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result).to be_persisted
- end
-
- it 'returns a new record when id is nil' do
- backend.write(expanded_key, build(:broadcast_message).to_json)
-
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result).to be_new_record
- end
-
- it 'returns a new record when id is missing' do
- backend.write(expanded_key, build(:broadcast_message).attributes.except('id').to_json)
-
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result).to be_new_record
- end
-
- it 'gracefully handles bad cached entry' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{')
-
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result).to eq 'block result'
- end
-
- it 'gracefully handles an empty hash' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return('{}')
-
- expect(cache.fetch(key, as: BroadcastMessage)).to be_a(BroadcastMessage)
- end
-
- it 'gracefully handles unknown attributes' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.merge(unknown_attribute: 1).to_json)
-
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result).to eq 'block result'
- end
-
- it 'gracefully handles excluded fields from attributes during serialization' do
- allow(backend).to receive(:read)
- .with(expanded_key)
- .and_return(broadcast_message.attributes.except("message_html").to_json)
-
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
- expect(result.public_send(field)).to be_nil
- end
- end
- end
-
- it "returns the result of the block when 'as' option is nil" do
- result = cache.fetch(key, as: nil) { 'block result' }
-
- expect(result).to eq('block result')
- end
-
- it "returns the result of the block when 'as' option is missing" do
- result = cache.fetch(key) { 'block result' }
-
- expect(result).to eq('block result')
- end
- end
-
- context 'when the cached value is a array' do
- before do
- backend.write(expanded_key, [broadcast_message].to_json)
- end
-
- it 'parses the cached value' do
- result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
-
- expect(result).to eq([broadcast_message])
- end
-
- it "returns an empty array when 'as' option is nil" do
- result = cache.fetch(key, as: nil) { 'block result' }
-
- expect(result).to eq([])
- end
-
- it "returns an empty array when 'as' option is not informed" do
- result = cache.fetch(key) { 'block result' }
-
- expect(result).to eq([])
- end
- end
-
- context 'when the cached value is true' do
- before do
- backend.write(expanded_key, "true")
- end
-
- it 'returns the cached value' do
- result = cache.fetch(key) { 'block result' }
-
- expect(result).to eq(true)
- end
-
- it 'does not execute the block' do
- expect { |block| cache.fetch(key, &block) }.not_to yield_control
- end
-
- it 'does not write to the cache' do
- expect(backend).not_to receive(:write)
-
- cache.fetch(key) { 'block result' }
- end
- end
-
- context 'when the cached value is false' do
- before do
- backend.write(expanded_key, "false")
- end
-
- it 'returns the cached value' do
- result = cache.fetch(key) { 'block result' }
-
- expect(result).to eq(false)
- end
-
- it 'does not execute the block' do
- expect { |block| cache.fetch(key, &block) }.not_to yield_control
- end
-
- it 'does not write to the cache' do
- expect(backend).not_to receive(:write)
-
- cache.fetch(key) { 'block result' }
- end
- end
-
- context 'when the cached value is nil' do
- before do
- backend.write(expanded_key, "null")
- end
-
- it 'returns the result of the block' do
- result = cache.fetch(key) { 'block result' }
-
- expect(result).to eq('block result')
- end
-
- it 'writes the result of the block to the cache' do
- expect(backend).to receive(:write)
- .with(expanded_key, 'block result'.to_json, {})
-
- cache.fetch(key) { 'block result' }
- end
- end
- end
- end
-end
-# rubocop:enable Style/RedundantFetchBlock
diff --git a/spec/lib/gitlab/lets_encrypt/challenge_spec.rb b/spec/lib/gitlab/lets_encrypt/challenge_spec.rb
index d853275520b..2418e93f105 100644
--- a/spec/lib/gitlab/lets_encrypt/challenge_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/challenge_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt::Challenge do
+RSpec.describe ::Gitlab::LetsEncrypt::Challenge, feature_category: :pages do
include LetsEncryptHelpers
let(:challenge) { described_class.new(acme_challenge_double) }
diff --git a/spec/lib/gitlab/lets_encrypt/client_spec.rb b/spec/lib/gitlab/lets_encrypt/client_spec.rb
index 1baf8749532..e109cf93f3f 100644
--- a/spec/lib/gitlab/lets_encrypt/client_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/client_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt::Client do
+RSpec.describe ::Gitlab::LetsEncrypt::Client, feature_category: :pages do
include LetsEncryptHelpers
let(:client) { described_class.new }
@@ -33,7 +33,7 @@ RSpec.describe ::Gitlab::LetsEncrypt::Client do
saved_private_key = Gitlab::CurrentSettings.lets_encrypt_private_key
- expect(saved_private_key).to be
+ expect(saved_private_key).to be_present
expect(Acme::Client).to have_received(:new).with(
hash_including(private_key: eq_pem(saved_private_key))
)
diff --git a/spec/lib/gitlab/lets_encrypt/order_spec.rb b/spec/lib/gitlab/lets_encrypt/order_spec.rb
index 419f9e28871..734afab6bb1 100644
--- a/spec/lib/gitlab/lets_encrypt/order_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/order_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt::Order do
+RSpec.describe ::Gitlab::LetsEncrypt::Order, feature_category: :pages do
include LetsEncryptHelpers
let(:acme_order) { acme_order_double }
diff --git a/spec/lib/gitlab/lets_encrypt_spec.rb b/spec/lib/gitlab/lets_encrypt_spec.rb
index 7597359847b..f07eac1e09a 100644
--- a/spec/lib/gitlab/lets_encrypt_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::LetsEncrypt do
+RSpec.describe ::Gitlab::LetsEncrypt, feature_category: :pages do
include LetsEncryptHelpers
before do
diff --git a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
index 81d423598f2..2246272d3af 100644
--- a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
@@ -27,10 +27,14 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
end
let(:markdown) { '`Foo`' }
- let(:html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Foo</code></p>' }
+ let(:html) { '<p dir="auto"><code>Foo</code></p>' }
let(:updated_markdown) { '`Bar`' }
- let(:updated_html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Bar</code></p>' }
+ let(:updated_html) { '<p dir="auto"><code>Bar</code></p>' }
+
+ before do
+ stub_commonmark_sourcepos_disabled
+ end
context 'an unchanged markdown field' do
let(:thing) { klass.new(project_id: project.id, namespace_id: project.project_namespace_id, title: markdown) }
diff --git a/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb b/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
index 8e75009099d..da5431a370b 100644
--- a/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
@@ -65,7 +65,9 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Extension, :clean_gitlab_redis_cach
Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ times = Gitlab::Redis::ClusterUtil.cluster?(redis) ? 2 : 1
+
+ expect_next_instances_of(Redis::PipelinedConnection, times) do |pipeline|
expect(pipeline).to receive(:mapped_hmget).once.and_call_original
end
end
@@ -82,9 +84,13 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Extension, :clean_gitlab_redis_cach
end
describe "#refresh_markdown_cache!" do
+ before do
+ stub_commonmark_sourcepos_disabled
+ end
+
it "stores the value in redis" do
expected_results = { "title_html" => "`Hello`",
- "description_html" => "<p data-sourcepos=\"1:1-1:7\" dir=\"auto\"><code>World</code></p>",
+ "description_html" => "<p dir=\"auto\"><code>World</code></p>",
"cached_markdown_version" => cache_version.to_s }
thing.refresh_markdown_cache!
@@ -101,7 +107,7 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Extension, :clean_gitlab_redis_cach
thing.refresh_markdown_cache!
expect(thing.title_html).to eq('`Hello`')
- expect(thing.description_html).to eq("<p data-sourcepos=\"1:1-1:7\" dir=\"auto\"><code>World</code></p>")
+ expect(thing.description_html).to eq("<p dir=\"auto\"><code>World</code></p>")
expect(thing.cached_markdown_version).to eq(cache_version)
end
end
diff --git a/spec/lib/gitlab/merge_requests/message_generator_spec.rb b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
index ac9a9aa2897..df8804d38d4 100644
--- a/spec/lib/gitlab/merge_requests/message_generator_spec.rb
+++ b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
@@ -77,6 +77,25 @@ RSpec.describe Gitlab::MergeRequests::MessageGenerator, feature_category: :code_
end
end
+ context 'when project has commit template with title and local reference' do
+ let(:merge_request) do
+ double(
+ :merge_request,
+ title: 'Fixes',
+ target_project: project,
+ to_reference: '!123',
+ metrics: nil,
+ merge_user: nil
+ )
+ end
+
+ let(message_template_name) { '%{title} (%{local_reference})' }
+
+ it 'evaluates only necessary variables' do
+ expect(result_message).to eq 'Fixes (!123)'
+ end
+ end
+
context 'when project has commit template with closed issues' do
let(message_template_name) { <<~MSG.rstrip }
Merge branch '%{source_branch}' into '%{target_branch}'
diff --git a/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb b/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
index 58740278425..0d6ce68a7f8 100644
--- a/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe ::Gitlab::Metrics::LooseForeignKeysSlis do
# config/database.yml and the specs need to work for all configurations. That
# means this assertion is a copy of the implementation.
let(:possible_labels) do
- ::Gitlab::Database.db_config_names.map do |db_config_name|
+ ::Gitlab::Database.db_config_names(with_schema: :gitlab_shared).map do |db_config_name|
{
db_config_name: db_config_name,
feature_category: :database
diff --git a/spec/lib/gitlab/metrics/rails_slis_spec.rb b/spec/lib/gitlab/metrics/rails_slis_spec.rb
index 32d3b7581f1..ef996f61082 100644
--- a/spec/lib/gitlab/metrics/rails_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/rails_slis_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Metrics::RailsSlis, feature_category: :error_budgets do
[
{
endpoint_id: "ProjectsController#index",
- feature_category: :projects,
+ feature_category: :groups_and_projects,
request_urgency: :default
}
]
diff --git a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
index 2d4c6d1cc56..fe5264a1ccb 100644
--- a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
@@ -10,6 +10,25 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
let(:store_label) { 'CustomStore' }
let(:event) { double(:event, duration: 15.2, payload: { key: %w[a b c], store: store }) }
+ context 'when receiving multiple instrumentation hits in a transaction' do
+ before do
+ allow(subscriber).to receive(:current_transaction)
+ .and_return(transaction)
+ end
+
+ it 'does not raise InvalidLabelSetError error' do
+ expect do
+ subscriber.cache_read(event)
+ subscriber.cache_read_multi(event)
+ subscriber.cache_write(event)
+ subscriber.cache_delete(event)
+ subscriber.cache_exist?(event)
+ subscriber.cache_fetch_hit(event)
+ subscriber.cache_generate(event)
+ end.not_to raise_error
+ end
+ end
+
describe '#cache_read' do
it 'increments the cache_read duration' do
expect(subscriber).to receive(:observe)
@@ -32,7 +51,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'does not increment cache read miss total' do
expect(transaction).not_to receive(:increment)
- .with(:gitlab_cache_misses_total, 1)
+ .with(:gitlab_cache_misses_total, 1, { store: store_label })
subscriber.cache_read(event)
end
@@ -44,7 +63,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'increments the cache_read_miss total' do
expect(transaction).to receive(:increment)
- .with(:gitlab_cache_misses_total, 1)
+ .with(:gitlab_cache_misses_total, 1, { store: store_label })
expect(transaction).to receive(:increment)
.with(any_args).at_least(1) # Other calls
@@ -56,7 +75,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'does not increment cache read miss total' do
expect(transaction).not_to receive(:increment)
- .with(:gitlab_cache_misses_total, 1)
+ .with(:gitlab_cache_misses_total, 1, { store: store_label })
subscriber.cache_read(event)
end
@@ -145,7 +164,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'increments the cache_read_hit count' do
expect(transaction).to receive(:increment)
- .with(:gitlab_transaction_cache_read_hit_count_total, 1)
+ .with(:gitlab_transaction_cache_read_hit_count_total, 1, { store: store_label })
subscriber.cache_fetch_hit(event)
end
@@ -168,9 +187,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
end
it 'increments the cache_fetch_miss count and cache_read_miss total' do
- expect(transaction).to receive(:increment).with(:gitlab_cache_misses_total, 1)
+ expect(transaction).to receive(:increment).with(:gitlab_cache_misses_total, 1, { store: store_label })
expect(transaction).to receive(:increment)
- .with(:gitlab_transaction_cache_read_miss_count_total, 1)
+ .with(:gitlab_transaction_cache_read_miss_count_total, 1, { store: store_label })
subscriber.cache_generate(event)
end
diff --git a/spec/lib/gitlab/middleware/compressed_json_spec.rb b/spec/lib/gitlab/middleware/compressed_json_spec.rb
index 5978b2422e0..c0e54c89222 100644
--- a/spec/lib/gitlab/middleware/compressed_json_spec.rb
+++ b/spec/lib/gitlab/middleware/compressed_json_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Middleware::CompressedJson do
+RSpec.describe Gitlab::Middleware::CompressedJson, feature_category: :shared do
let_it_be(:decompressed_input) { '{"foo": "bar"}' }
let_it_be(:input) { ActiveSupport::Gzip.compress(decompressed_input) }
@@ -70,24 +70,6 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
end
describe '#call' do
- context 'with collector route' do
- let(:path) { '/api/v4/error_tracking/collector/1/store' }
-
- it_behaves_like 'decompress middleware'
-
- context 'with no Content-Type' do
- let(:content_type) { nil }
-
- it_behaves_like 'decompress middleware'
- end
-
- include_context 'with relative url' do
- let(:path) { "#{relative_url_root}/api/v4/error_tracking/collector/1/store" }
-
- it_behaves_like 'decompress middleware'
- end
- end
-
context 'with packages route' do
context 'with instance level endpoint' do
context 'with npm advisory bulk url' do
@@ -192,11 +174,11 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
it_behaves_like 'passes input'
end
- context 'payload is too large' do
+ context 'when payload is too large' do
let(:body_limit) { Gitlab::Middleware::CompressedJson::MAXIMUM_BODY_SIZE }
let(:decompressed_input) { 'a' * (body_limit + 100) }
let(:input) { ActiveSupport::Gzip.compress(decompressed_input) }
- let(:path) { '/api/v4/error_tracking/collector/1/envelope' }
+ let(:path) { '/api/v4/packages/npm/-/npm/v1/security/advisories/bulk' }
it 'reads only limited size' do
expect(middleware.call(env))
diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb
index 112fdb183ab..1c665ec6e18 100644
--- a/spec/lib/gitlab/omniauth_initializer_spec.rb
+++ b/spec/lib/gitlab/omniauth_initializer_spec.rb
@@ -216,6 +216,14 @@ RSpec.describe Gitlab::OmniauthInitializer do
expect { subject.execute([hash_config]) }.to raise_error(NameError)
end
+ it 'configures fail_with_empty_uid for shibboleth' do
+ shibboleth_config = { 'name' => 'shibboleth', 'args' => {} }
+
+ expect(devise_config).to receive(:omniauth).with(:shibboleth, { fail_with_empty_uid: true })
+
+ subject.execute([shibboleth_config])
+ end
+
it 'configures defaults for google_oauth2' do
google_config = {
'name' => 'google_oauth2',
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index dc62fcb4478..7cee65c13f7 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -14,6 +14,10 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
end
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ end
+
it 'return false for other types of relations' do
expect(subject.available_for_type?(User.all)).to be_falsey
end
@@ -56,6 +60,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it 'return false for other types of relations' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
+ expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey
end
end
@@ -70,6 +75,10 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it 'returns true for AuditEvent' do
expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
end
+
+ it 'returns true for Packages::BuildInfo' do
+ expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ end
end
context 'with other order-by columns' do
diff --git a/spec/lib/gitlab/patch/redis_cache_store_spec.rb b/spec/lib/gitlab/patch/redis_cache_store_spec.rb
new file mode 100644
index 00000000000..5a674d443bb
--- /dev/null
+++ b/spec/lib/gitlab/patch/redis_cache_store_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, feature_category: :scalability do
+ let(:cache) { Rails.cache }
+
+ before do
+ cache.write('x', 1)
+ cache.write('y', 2)
+ cache.write('z', 3)
+
+ cache.write('{user1}:x', 1)
+ cache.write('{user1}:y', 2)
+ cache.write('{user1}:z', 3)
+ end
+
+ describe '#read_multi_mget' do
+ shared_examples 'reading using cache stores' do
+ it 'gets multiple cross-slot keys' do
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ # fetch_multi requires a block and we have to specifically test it
+ # as it is used in the Gitlab project
+ cache.fetch_multi('x', 'y', 'z') { |key| key }
+ end
+ ).to eq({ 'x' => 1, 'y' => 2, 'z' => 3 })
+ end
+
+ it 'gets multiple keys' do
+ expect(
+ cache.fetch_multi('{user1}:x', '{user1}:y', '{user1}:z') { |key| key }
+ ).to eq({ '{user1}:x' => 1, '{user1}:y' => 2, '{user1}:z' => 3 })
+ end
+
+ context 'when reading large amount of keys' do
+ it 'batches get into pipelines of 100' do
+ cache.redis.with do |redis|
+ normal_cluster = !redis.is_a?(Gitlab::Redis::MultiStore) && Gitlab::Redis::ClusterUtil.cluster?(redis)
+ multistore_cluster = redis.is_a?(Gitlab::Redis::MultiStore) &&
+ ::Gitlab::Redis::ClusterUtil.cluster?(redis.default_store)
+
+ if normal_cluster || multistore_cluster
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:mget).and_call_original
+ end
+ end
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ cache.read_multi(*Array.new(101) { |i| i })
+ end
+ end
+ end
+ end
+
+ context 'when cache is Rails.cache' do
+ let(:cache) { Rails.cache }
+
+ context 'when reading using secondary store as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_cache: false)
+ end
+
+ it_behaves_like 'reading using cache stores'
+ end
+
+ it_behaves_like 'reading using cache stores'
+ end
+
+ context 'when cache is feature flag cache store' do
+ let(:cache) { Gitlab::Redis::FeatureFlag.cache_store }
+
+ it_behaves_like 'reading using cache stores'
+ end
+
+ context 'when cache is repository cache store' do
+ let(:cache) { Gitlab::Redis::RepositoryCache.cache_store }
+
+ it_behaves_like 'reading using cache stores'
+ end
+ end
+
+ describe '#delete_multi_entries' do
+ shared_examples 'deleting using cache stores' do
+ it 'deletes multiple cross-slot keys' do
+ expect(Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ cache.delete_multi(%w[x y z])
+ end).to eq(3)
+ end
+
+ it 'deletes multiple keys' do
+ expect(
+ cache.delete_multi(%w[{user1}:x {user1}:y {user1}:z])
+ ).to eq(3)
+ end
+
+ context 'when deleting large amount of keys' do
+ before do
+ 200.times { |i| cache.write(i, i) }
+ end
+
+ it 'calls pipeline multiple times' do
+ cache.redis.with do |redis|
+ # no expectation on number of times as it could vary depending on cluster size
+ # if the Redis is a Redis Cluster
+ if Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:del).and_call_original
+ end
+ end
+
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ cache.delete_multi(Array(0..199))
+ end
+ ).to eq(200)
+ end
+ end
+ end
+
+ context 'when cache is Rails.cache' do
+ let(:cache) { Rails.cache }
+
+ it_behaves_like 'deleting using cache stores'
+ end
+
+ context 'when cache is feature flag cache store' do
+ let(:cache) { Gitlab::Redis::FeatureFlag.cache_store }
+
+ it_behaves_like 'deleting using cache stores'
+ end
+
+ context 'when cache is repository cache store' do
+ let(:cache) { Gitlab::Redis::RepositoryCache.cache_store }
+
+ it_behaves_like 'deleting using cache stores'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 718b20c59ed..53dc145dcc4 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -258,6 +258,23 @@ RSpec.describe Gitlab::PathRegex do
end
end
+ describe '.organization_path_regex' do
+ subject { described_class.organization_path_regex }
+
+ it 'rejects reserved words' do
+ expect(subject).not_to match('admin/')
+ expect(subject).not_to match('api/')
+ expect(subject).not_to match('create/')
+ expect(subject).not_to match('new/')
+ end
+
+ it 'accepts other words' do
+ expect(subject).to match('simple/')
+ expect(subject).to match('org/')
+ expect(subject).to match('my_org/')
+ end
+ end
+
describe '.full_namespace_path_regex' do
subject { described_class.full_namespace_path_regex }
diff --git a/spec/lib/gitlab/path_traversal_spec.rb b/spec/lib/gitlab/path_traversal_spec.rb
new file mode 100644
index 00000000000..bba6f8293c2
--- /dev/null
+++ b/spec/lib/gitlab/path_traversal_spec.rb
@@ -0,0 +1,185 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::PathTraversal, feature_category: :shared do
+ using RSpec::Parameterized::TableSyntax
+
+ delegate :check_path_traversal!, :check_allowed_absolute_path!,
+ :check_allowed_absolute_path_and_path_traversal!, to: :described_class
+
+ describe '.check_path_traversal!' do
+ it 'detects path traversal in string without any separators' do
+ expect { check_path_traversal!('.') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string' do
+ expect { check_path_traversal!('../foo') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..\\foo') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string, even to just the subdirectory' do
+ expect { check_path_traversal!('../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..\\') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('/../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('\\..\\') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in the middle of the string' do
+ expect { check_path_traversal!('foo/../../bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..\\..\\bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo/..\\bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\../bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo/..\\..\\..\\..\\../bar') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string when slash-terminates' do
+ expect { check_path_traversal!('foo/../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..\\') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string' do
+ expect { check_path_traversal!('foo/..') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in string with encoded chars' do
+ expect { check_path_traversal!('foo%2F..%2Fbar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo%2F%2E%2E%2Fbar') }.to raise_error(/Invalid path/)
+ end
+
+ it 'detects double encoded chars' do
+ expect { check_path_traversal!('foo%252F..%2Fbar') }
+ .to raise_error(Gitlab::Utils::DoubleEncodingError, /is not allowed/)
+ expect { check_path_traversal!('foo%252F%2E%2E%2Fbar') }
+ .to raise_error(Gitlab::Utils::DoubleEncodingError, /is not allowed/)
+ end
+
+ it 'does nothing for a safe string' do
+ expect(check_path_traversal!('./foo')).to eq('./foo')
+ expect(check_path_traversal!('.test/foo')).to eq('.test/foo')
+ expect(check_path_traversal!('..test/foo')).to eq('..test/foo')
+ expect(check_path_traversal!('dir/..foo.rb')).to eq('dir/..foo.rb')
+ expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
+ end
+
+ it 'logs potential path traversal attempts' do
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with(message: "Potential path traversal attempt detected", path: "..")
+ expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
+ end
+
+ it 'does not log for a safe string' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
+ .with(message: "Potential path traversal attempt detected", path: "dir/.foo.rb")
+ expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
+ end
+
+ it 'does nothing for nil' do
+ expect(check_path_traversal!(nil)).to be_nil
+ end
+
+ it 'does nothing for safe HashedPath' do
+ expect(check_path_traversal!(Gitlab::HashedPath.new('tmp', root_hash: 1)))
+ .to eq '6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b/tmp'
+ end
+
+ it 'raises for unsafe HashedPath' do
+ expect { check_path_traversal!(Gitlab::HashedPath.new('tmp', '..', 'etc', 'passwd', root_hash: 1)) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'raises for other non-strings' do
+ expect { check_path_traversal!(%w[/tmp /tmp/../etc/passwd]) }.to raise_error(/Invalid path/)
+ end
+ end
+
+ describe '.check_allowed_absolute_path!' do
+ let(:allowed_paths) { ['/home/foo'] }
+
+ it 'raises an exception if an absolute path is not allowed' do
+ expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'does nothing for an allowed absolute path' do
+ expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
+ end
+ end
+
+ describe '.check_allowed_absolute_path_and_path_traversal!' do
+ let(:allowed_paths) { %w[/home/foo ./foo .test/foo ..test/foo dir/..foo.rb dir/.foo.rb] }
+
+ it 'detects path traversal in string without any separators' do
+ expect { check_allowed_absolute_path_and_path_traversal!('.', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../foo', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\foo', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the start of the string, even to just the subdirectory' do
+ expect { check_allowed_absolute_path_and_path_traversal!('../', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('..\\', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('/../', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('\\..\\', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal in the middle of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../../bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\..\\bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\../bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\..\\..\\..\\../bar', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string when slash-terminates' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/../', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'detects path traversal at the end of the string' do
+ expect { check_allowed_absolute_path_and_path_traversal!('foo/..', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ expect { check_allowed_absolute_path_and_path_traversal!('foo\\..', allowed_paths) }
+ .to raise_error(/Invalid path/)
+ end
+
+ it 'does not return errors for a safe string' do
+ expect(check_allowed_absolute_path_and_path_traversal!('./foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('.test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('..test/foo', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/..foo.rb', allowed_paths)).to be_nil
+ expect(check_allowed_absolute_path_and_path_traversal!('dir/.foo.rb', allowed_paths)).to be_nil
+ end
+
+ it 'raises error for a non-string' do
+ expect { check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'raises an exception if an absolute path is not allowed' do
+ expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
+ end
+
+ it 'does nothing for an allowed absolute path' do
+ expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index b076bb65fb5..f3dcdfe2a9d 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -9,8 +9,10 @@ RSpec.describe Gitlab::ProjectAuthorizations, feature_category: :system_access d
end
end
+ let(:service) { described_class.new(user) }
+
subject(:authorizations) do
- described_class.new(user).calculate
+ service.calculate
end
# Inline this shared example while cleaning up feature flag linear_project_authorization
@@ -421,9 +423,53 @@ RSpec.describe Gitlab::ProjectAuthorizations, feature_category: :system_access d
end
end
- context 'when feature_flag linear_project_authorization_is disabled' do
+ context 'it compares values for correctness' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when values returned by the queries are the same' do
+ it 'logs a message indicating that the values are the same' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(event: 'linear_authorized_projects_check',
+ user_id: user.id,
+ matching_results: true)
+ service.calculate
+ end
+ end
+
+ context 'when values returned by queries are different' do
+ before do
+ create(:project_authorization)
+ allow(service).to receive(:calculate_with_linear_query).and_return(ProjectAuthorization.all)
+ end
+
+ it 'logs a message indicating that the values are different' do
+ expect(Gitlab::AppJsonLogger).to receive(:warn).with(event: 'linear_authorized_projects_check',
+ user_id: user.id,
+ matching_results: false)
+ service.calculate
+ end
+ end
+ end
+
+ context 'when feature_flag linear_project_authorization is disabled' do
+ before do
+ stub_feature_flags(linear_project_authorization: false)
+ end
+
+ it_behaves_like 'project authorizations'
+ end
+
+ context 'when feature_flag compare_project_authorization_linear_cte is disabled' do
+ before do
+ stub_feature_flags(compare_project_authorization_linear_cte: false)
+ end
+
+ it_behaves_like 'project authorizations'
+ end
+
+ context 'when feature_flag linear_project_authorization and compare_project_authorization_linear_cte are disabled' do
before do
stub_feature_flags(linear_project_authorization: false)
+ stub_feature_flags(compare_project_authorization_linear_cte: false)
end
it_behaves_like 'project authorizations'
diff --git a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
index a78d15134fa..44bbe888c64 100644
--- a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
+++ b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
@@ -46,29 +46,35 @@ RSpec.describe Gitlab::ReactiveCacheSetCache, :clean_gitlab_redis_cache do
end
describe '#clear_cache!', :use_clean_rails_redis_caching do
- shared_examples 'clears cache' do
- it 'deletes the cached items' do
- # Cached key and value
- Rails.cache.write('test_item', 'test_value')
- # Add key to set
- cache.write(cache_prefix, 'test_item')
+ it 'deletes the cached items' do
+ # Cached key and value
+ Rails.cache.write('test_item', 'test_value')
+ # Add key to set
+ cache.write(cache_prefix, 'test_item')
- expect(cache.read(cache_prefix)).to contain_exactly('test_item')
- cache.clear_cache!(cache_prefix)
+ expect(cache.read(cache_prefix)).to contain_exactly('test_item')
+ cache.clear_cache!(cache_prefix)
- expect(cache.read(cache_prefix)).to be_empty
- end
+ expect(cache.read(cache_prefix)).to be_empty
end
- context 'when featuer flag disabled' do
+ context 'when key size is large' do
before do
- stub_feature_flags(use_pipeline_over_multikey: false)
+ 1001.times { |i| cache.write(cache_prefix, i) }
end
- it_behaves_like 'clears cache'
- end
+ it 'sends multiple pipelines of 1000 unlinks' do
+ Gitlab::Redis::Cache.with do |redis|
+ if Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:pipelined).once.and_call_original
+ end
+ end
- it_behaves_like 'clears cache'
+ cache.clear_cache!(cache_prefix)
+ end
+ end
end
describe '#include?' do
diff --git a/spec/lib/gitlab/redis/chat_spec.rb b/spec/lib/gitlab/redis/chat_spec.rb
new file mode 100644
index 00000000000..7a008580936
--- /dev/null
+++ b/spec/lib/gitlab/redis/chat_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::Chat, feature_category: :no_category do # rubocop: disable RSpec/InvalidFeatureCategory
+ include_examples "redis_new_instance_shared_examples", 'chat', Gitlab::Redis::Cache
+end
diff --git a/spec/lib/gitlab/redis/cluster_cache_spec.rb b/spec/lib/gitlab/redis/cluster_cache_spec.rb
new file mode 100644
index 00000000000..e448d608c53
--- /dev/null
+++ b/spec/lib/gitlab/redis/cluster_cache_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::ClusterCache, feature_category: :redis do
+ include_examples "redis_new_instance_shared_examples", 'cluster_cache', Gitlab::Redis::Cache
+end
diff --git a/spec/lib/gitlab/redis/cluster_util_spec.rb b/spec/lib/gitlab/redis/cluster_util_spec.rb
new file mode 100644
index 00000000000..3993004518d
--- /dev/null
+++ b/spec/lib/gitlab/redis/cluster_util_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::ClusterUtil, feature_category: :scalability do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '.cluster?' do
+ context 'when MultiStore' do
+ let(:redis_cluster) { instance_double(::Redis::Cluster) }
+
+ where(:pri_store, :sec_store, :expected_val) do
+ :cluster | :cluster | true
+ :cluster | :single | true
+ :single | :cluster | true
+ :single | :single | false
+ end
+
+ before do
+ # stub all initialiser steps in Redis::Cluster.new to avoid connecting to a Redis Cluster node
+ allow(::Redis::Cluster).to receive(:new).and_return(redis_cluster)
+ allow(redis_cluster).to receive(:is_a?).with(::Redis::Cluster).and_return(true)
+ allow(redis_cluster).to receive(:id).and_return(1)
+
+ allow(Gitlab::Redis::MultiStore).to receive(:same_redis_store?).and_return(false)
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ with_them do
+ it 'returns expected value' do
+ primary_store = pri_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ secondary_store = sec_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ multistore = Gitlab::Redis::MultiStore.new(primary_store, secondary_store, 'teststore')
+ expect(described_class.cluster?(multistore)).to eq(expected_val)
+ end
+ end
+ end
+
+ context 'when is not Redis::Cluster' do
+ it 'returns false' do
+ expect(described_class.cluster?(::Redis.new)).to be_falsey
+ end
+ end
+
+ context 'when is Redis::Cluster' do
+ let(:redis_cluster) { instance_double(::Redis::Cluster) }
+
+ before do
+ # stub all initialiser steps in Redis::Cluster.new to avoid connecting to a Redis Cluster node
+ allow(::Redis::Cluster).to receive(:new).and_return(redis_cluster)
+ allow(redis_cluster).to receive(:is_a?).with(::Redis::Cluster).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(described_class.cluster?(::Redis.new(cluster: ['redis://localhost:6000']))).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/cross_slot_spec.rb b/spec/lib/gitlab/redis/cross_slot_spec.rb
new file mode 100644
index 00000000000..b3eac4357e8
--- /dev/null
+++ b/spec/lib/gitlab/redis/cross_slot_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::CrossSlot, feature_category: :redis do
+ describe '.pipelined' do
+ context 'when using redis client' do
+ before do
+ Gitlab::Redis::Queues.with { |redis| redis.set('a', 1) }
+ end
+
+ it 'performs redis-rb pipelined' do
+ expect(Gitlab::Redis::CrossSlot::Router).not_to receive(:new)
+
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ Gitlab::Redis::Queues.with do |redis|
+ described_class::Pipeline.new(redis).pipelined do |p|
+ p.get('a')
+ p.set('b', 1)
+ end
+ end
+ end
+ ).to eq(%w[1 OK])
+ end
+ end
+
+ context 'when using with MultiStore' do
+ let(:multistore) do
+ Gitlab::Redis::MultiStore.new(
+ ::Redis.new(::Gitlab::Redis::SharedState.params),
+ ::Redis.new(::Gitlab::Redis::Sessions.params),
+ 'testing')
+ end
+
+ before do
+ Gitlab::Redis::SharedState.with { |redis| redis.set('a', 1) }
+ Gitlab::Redis::Sessions.with { |redis| redis.set('a', 1) }
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ it 'performs multistore pipelined' do
+ expect(Gitlab::Redis::CrossSlot::Router).not_to receive(:new)
+
+ expect(
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ described_class::Pipeline.new(multistore).pipelined do |p|
+ p.get('a')
+ p.set('b', 1)
+ end
+ end
+ ).to eq(%w[1 OK])
+ end
+ end
+
+ context 'when using Redis::Cluster' do
+ # Only stub redis client internals since the CI pipeline does not run a Redis Cluster
+ let(:redis) { double(:redis) } # rubocop:disable RSpec/VerifiedDoubles
+ let(:client) { double(:client) } # rubocop:disable RSpec/VerifiedDoubles
+ let(:pipeline) { double(:pipeline) } # rubocop:disable RSpec/VerifiedDoubles
+
+ let(:arguments) { %w[a b c d] }
+
+ subject do
+ described_class::Pipeline.new(redis).pipelined do |p|
+ arguments.each { |key| p.get(key) }
+ end
+ end
+
+ before do
+ allow(redis).to receive(:_client).and_return(client)
+ allow(redis).to receive(:pipelined).and_yield(pipeline)
+ allow(client).to receive(:instance_of?).with(::Redis::Cluster).and_return(true)
+ end
+
+ it 'fan-out and fan-in commands to separate shards' do
+ # simulate fan-out to 3 shards with random order
+ expect(client).to receive(:_find_node_key).exactly(4).times.and_return(3, 2, 1, 3)
+
+ arguments.each do |key|
+ f = double('future') # rubocop:disable RSpec/VerifiedDoubles
+ expect(pipeline).to receive(:get).with(key).and_return(f)
+ expect(f).to receive(:value).and_return(key)
+ end
+
+ expect(subject).to eq(arguments)
+ end
+
+ shared_examples 'fallback on cross-slot' do |redirection|
+ context 'when redis cluster undergoing slot migration' do
+ before do
+ allow(pipeline).to receive(:get).and_raise(::Redis::CommandError.new("#{redirection} 1 127.0.0.1:7001"))
+ end
+
+ it 'logs error and executes sequentially' do
+ expect(client).to receive(:_find_node_key).exactly(4).times.and_return(3, 2, 1, 3)
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(::Redis::CommandError))
+
+ arguments.each do |key|
+ expect(redis).to receive(:get).with(key).and_return(key)
+ end
+
+ subject
+ end
+ end
+ end
+
+ it_behaves_like 'fallback on cross-slot', 'MOVED'
+ it_behaves_like 'fallback on cross-slot', 'ASK'
+
+ context 'when receiving non-MOVED/ASK command errors' do
+ before do
+ allow(pipeline).to receive(:get).and_raise(::Redis::CommandError.new)
+ allow(client).to receive(:_find_node_key).exactly(4).times.and_return(3, 2, 1, 3)
+ end
+
+ it 'raises error' do
+ expect { subject }.to raise_error(::Redis::CommandError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index e45c29a9dd2..80d5915b819 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -138,6 +138,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
let_it_be(:hvalmapped) { { "item1" => value1 } }
let_it_be(:sscanargs) { [skey2, 0] }
let_it_be(:sscanval) { ["0", [value1]] }
+ let_it_be(:scanargs) { ["0"] }
+ let_it_be(:scankwargs) { { match: '*:set:key2*' } }
+ let_it_be(:scanval) { ["0", [skey2]] }
let_it_be(:sscan_eachval) { [value1] }
let_it_be(:sscan_each_arg) { { match: '*1*' } }
let_it_be(:hscan_eachval) { [[hitem1, value1]] }
@@ -162,6 +165,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
'execute :hmget command' | :hmget | ref(:hgetargs) | ref(:hmgetval) | {} | nil
'execute :mapped_hmget command' | :mapped_hmget | ref(:mhmgetargs) | ref(:hvalmapped) | {} | nil
'execute :sscan command' | :sscan | ref(:sscanargs) | ref(:sscanval) | {} | nil
+ 'execute :scan command' | :scan | ref(:scanargs) | ref(:scanval) | ref(:scankwargs) | nil
# we run *scan_each here as they are reads too
'execute :scan_each command' | :scan_each | nil | ref(:scan_each_val) | ref(:scan_each_arg) | nil
@@ -489,6 +493,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
+ 'execute :sadd? command' | :sadd? | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
'execute :unlink command' | :unlink | ref(:key3) | nil | :get | ref(:key3)
@@ -777,6 +782,25 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
end
+
+ context 'when either store is a an instance of ::Redis::Cluster' do
+ before do
+ client = double
+ allow(client).to receive(:instance_of?).with(::Redis::Cluster).and_return(true)
+ allow(primary_store).to receive(:_client).and_return(client)
+ end
+
+ it 'calls cross-slot pipeline within multistore' do
+ if name == :pipelined
+ # we intentionally exclude `.and_call_original` since primary_store/secondary_store
+ # may not be running on a proper Redis Cluster.
+ expect(Gitlab::Redis::CrossSlot::Pipeline).to receive(:new).with(primary_store).exactly(:once)
+ expect(Gitlab::Redis::CrossSlot::Pipeline).not_to receive(:new).with(secondary_store)
+ end
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/redis/rate_limiting_spec.rb b/spec/lib/gitlab/redis/rate_limiting_spec.rb
index 0bea7f8bcb2..e79c070df93 100644
--- a/spec/lib/gitlab/redis/rate_limiting_spec.rb
+++ b/spec/lib/gitlab/redis/rate_limiting_spec.rb
@@ -4,10 +4,4 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::RateLimiting do
include_examples "redis_new_instance_shared_examples", 'rate_limiting', Gitlab::Redis::Cache
-
- describe '.cache_store' do
- it 'uses the CACHE_NAMESPACE namespace' do
- expect(described_class.cache_store.options[:namespace]).to eq(Gitlab::Redis::Cache::CACHE_NAMESPACE)
- end
- end
end
diff --git a/spec/lib/gitlab/repository_cache/preloader_spec.rb b/spec/lib/gitlab/repository_cache/preloader_spec.rb
index e6fb0da6412..44d7d0e1db1 100644
--- a/spec/lib/gitlab/repository_cache/preloader_spec.rb
+++ b/spec/lib/gitlab/repository_cache/preloader_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_redis_caching,
+RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_repository_cache_store_caching,
feature_category: :source_code_management do
let(:projects) { create_list(:project, 2, :repository) }
let(:repositories) { projects.map(&:repository) }
diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb
index 6b52c315a70..e3cc6ed69fb 100644
--- a/spec/lib/gitlab/repository_hash_cache_spec.rb
+++ b/spec/lib/gitlab/repository_hash_cache_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_repository_cache, feature_category: :source_code_management do
let_it_be(:project) { create(:project) }
let(:repository) { project.repository }
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 65a50b68c44..23b2a2b9493 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_repository_cache, feature_category: :source_code_management do
let_it_be(:project) { create(:project) }
let(:repository) { project.repository }
@@ -59,8 +59,13 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
it 'writes the value to the cache' do
write_cache
- redis_keys = Gitlab::Redis::Cache.with { |redis| redis.scan(0, match: "*") }.last
- expect(redis_keys).to include("#{gitlab_cache_namespace}:branch_names:#{namespace}:set")
+ cursor, redis_keys = Gitlab::Redis::RepositoryCache.with { |redis| redis.scan(0, match: "*") }
+ while cursor != "0"
+ cursor, keys = Gitlab::Redis::RepositoryCache.with { |redis| redis.scan(cursor, match: "*") }
+ redis_keys << keys
+ end
+
+ expect(redis_keys.flatten).to include("#{gitlab_cache_namespace}:branch_names:#{namespace}:set")
expect(cache.fetch('branch_names')).to contain_exactly('main')
end
@@ -72,60 +77,64 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
end
describe '#expire' do
- shared_examples 'expires varying amount of keys' do
- subject { cache.expire(*keys) }
+ subject { cache.expire(*keys) }
- before do
- cache.write(:foo, ['value'])
- cache.write(:bar, ['value2'])
- end
+ before do
+ cache.write(:foo, ['value'])
+ cache.write(:bar, ['value2'])
+ end
- it 'actually wrote the values' do
- expect(cache.read(:foo)).to contain_exactly('value')
- expect(cache.read(:bar)).to contain_exactly('value2')
- end
+ it 'actually wrote the values' do
+ expect(cache.read(:foo)).to contain_exactly('value')
+ expect(cache.read(:bar)).to contain_exactly('value2')
+ end
- context 'single key' do
- let(:keys) { %w(foo) }
+ context 'single key' do
+ let(:keys) { %w(foo) }
- it { is_expected.to eq(1) }
+ it { is_expected.to eq(1) }
- it 'deletes the given key from the cache' do
- subject
+ it 'deletes the given key from the cache' do
+ subject
- expect(cache.read(:foo)).to be_empty
- end
+ expect(cache.read(:foo)).to be_empty
end
+ end
- context 'multiple keys' do
- let(:keys) { %w(foo bar) }
+ context 'multiple keys' do
+ let(:keys) { %w(foo bar) }
- it { is_expected.to eq(2) }
+ it { is_expected.to eq(2) }
- it 'deletes the given keys from the cache' do
- subject
+ it 'deletes the given keys from the cache' do
+ subject
- expect(cache.read(:foo)).to be_empty
- expect(cache.read(:bar)).to be_empty
- end
+ expect(cache.read(:foo)).to be_empty
+ expect(cache.read(:bar)).to be_empty
end
+ end
- context 'no keys' do
- let(:keys) { [] }
+ context 'no keys' do
+ let(:keys) { [] }
- it { is_expected.to eq(0) }
- end
+ it { is_expected.to eq(0) }
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_pipeline_over_multikey: false)
- end
+ context 'when deleting over 1000 keys' do
+ it 'deletes in batches of 1000' do
+ Gitlab::Redis::RepositoryCache.with do |redis|
+ # In a Redis Cluster, we do not want a pipeline to have too many keys
+ # but in a standalone Redis, multi-key commands can be used.
+ if ::Gitlab::Redis::ClusterUtil.cluster?(redis)
+ expect(redis).to receive(:pipelined).at_least(2).and_call_original
+ else
+ expect(redis).to receive(:unlink).and_call_original
+ end
+ end
- it_behaves_like 'expires varying amount of keys'
+ cache.expire(*(Array.new(1001) { |i| i }))
+ end
end
-
- it_behaves_like 'expires varying amount of keys'
end
describe '#exist?' do
diff --git a/spec/lib/gitlab/repository_size_error_message_spec.rb b/spec/lib/gitlab/repository_size_error_message_spec.rb
index 633ec41ab00..8fce76f50db 100644
--- a/spec/lib/gitlab/repository_size_error_message_spec.rb
+++ b/spec/lib/gitlab/repository_size_error_message_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
end
let(:message) { checker.error_message }
- let(:base_message) { 'because this repository has exceeded its size limit of 10 MB by 5 MB' }
+ let(:base_message) { 'because this repository has exceeded its size limit of 10 MiB by 5 MiB' }
before do
allow(namespace).to receive(:total_repository_size_excess).and_return(0)
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
describe '#push_error' do
context 'with exceeded_limit value' do
let(:rejection_message) do
- 'because this repository has exceeded its size limit of 10 MB by 15 MB'
+ 'because this repository has exceeded its size limit of 10 MiB by 15 MiB'
end
it 'returns the correct message' do
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
context 'when no additional repo storage is available' do
it 'returns the correct message' do
- expect(message.new_changes_error).to eq("Your push to this repository would cause it to exceed the size limit of 10 MB so it has been rejected. #{message.more_info_message}")
+ expect(message.new_changes_error).to eq("Your push to this repository would cause it to exceed the size limit of 10 MiB so it has been rejected. #{message.more_info_message}")
end
end
end
diff --git a/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb b/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb
index b15f95dbd9c..768ff368602 100644
--- a/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb
+++ b/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb
@@ -76,16 +76,4 @@ RSpec.describe Gitlab::ResourceEvents::AssignmentEventRecorder, feature_category
end.to change { ResourceEvents::MergeRequestAssignmentEvent.count }.by(1)
end
end
-
- context 'when the record_issue_and_mr_assignee_events FF is off' do
- before do
- stub_feature_flags(record_issue_and_mr_assignee_events: false)
- end
-
- it 'does nothing' do
- expect do
- described_class.new(parent: mr_with_one_assignee, old_assignees: [user2, user3]).record
- end.not_to change { mr_with_one_assignee.assignment_events.count }
- end
- end
end
diff --git a/spec/lib/gitlab/search/abuse_detection_spec.rb b/spec/lib/gitlab/search/abuse_detection_spec.rb
index 7fb9621141c..f9a1d0211b9 100644
--- a/spec/lib/gitlab/search/abuse_detection_spec.rb
+++ b/spec/lib/gitlab/search/abuse_detection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Search::AbuseDetection do
+RSpec.describe Gitlab::Search::AbuseDetection, feature_category: :global_search do
subject { described_class.new(params) }
let(:params) { { query_string: 'foobar' } }
diff --git a/spec/lib/gitlab/search/params_spec.rb b/spec/lib/gitlab/search/params_spec.rb
index 13770e550ec..3235a0b2126 100644
--- a/spec/lib/gitlab/search/params_spec.rb
+++ b/spec/lib/gitlab/search/params_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Search::Params do
+RSpec.describe Gitlab::Search::Params, feature_category: :global_search do
subject { described_class.new(params, detect_abuse: detect_abuse) }
let(:search) { 'search' }
diff --git a/spec/lib/gitlab/search_context/builder_spec.rb b/spec/lib/gitlab/search_context/builder_spec.rb
index 78799b67a69..4707299cc7d 100644
--- a/spec/lib/gitlab/search_context/builder_spec.rb
+++ b/spec/lib/gitlab/search_context/builder_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::SearchContext::Builder, type: :controller do
it 'delegates to `#with_group`' do
expect(builder).to receive(:with_group).with(project.group)
- expect(context).to be
+ expect(context).to be_present
end
it { is_expected.to be_search_context(project: project, group: project.group) }
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index a38073e7c51..ce54f853e1b 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SearchResults do
+RSpec.describe Gitlab::SearchResults, feature_category: :global_search do
include ProjectForksHelper
include SearchHelpers
using RSpec::Parameterized::TableSyntax
@@ -260,20 +260,60 @@ RSpec.describe Gitlab::SearchResults do
end
end
+ describe '#projects' do
+ let(:scope) { 'projects' }
+ let(:query) { 'Test' }
+
+ describe 'filtering' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:unarchived_project) { create(:project, :public, group: group, name: 'Test1') }
+ let_it_be(:archived_project) { create(:project, :archived, :public, group: group, name: 'Test2') }
+
+ it_behaves_like 'search results filtered by archived'
+
+ context 'when the search_projects_hide_archived feature flag is disabled' do
+ before do
+ stub_feature_flags(search_projects_hide_archived: false)
+ end
+
+ context 'when filter not provided' do
+ let(:filters) { {} }
+
+ it 'returns archived and unarchived results', :aggregate_failures do
+ expect(results.objects('projects')).to include unarchived_project
+ expect(results.objects('projects')).to include archived_project
+ end
+ end
+ end
+ end
+ end
+
describe '#users' do
it 'does not call the UsersFinder when the current_user is not allowed to read users list' do
allow(Ability).to receive(:allowed?).and_return(false)
- expect(UsersFinder).not_to receive(:new).with(user, search: 'foo').and_call_original
+ expect(UsersFinder).not_to receive(:new).with(user, { search: 'foo', use_minimum_char_limit: false }).and_call_original
results.objects('users')
end
it 'calls the UsersFinder' do
- expect(UsersFinder).to receive(:new).with(user, search: 'foo').and_call_original
+ expect(UsersFinder).to receive(:new).with(user, { search: 'foo', use_minimum_char_limit: false }).and_call_original
results.objects('users')
end
+
+ context 'when autocomplete_users_use_search_service feature flag is disabled' do
+ before do
+ stub_feature_flags(autocomplete_users_use_search_service: false)
+ end
+
+ it 'calls the UsersFinder without use_minimum_char_limit' do
+ expect(UsersFinder).to receive(:new).with(user, search: 'foo').and_call_original
+
+ results.objects('users')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/sentence_spec.rb b/spec/lib/gitlab/sentence_spec.rb
new file mode 100644
index 00000000000..b37925abbc6
--- /dev/null
+++ b/spec/lib/gitlab/sentence_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Sentence, feature_category: :shared do
+ delegate :to_exclusive_sentence, to: :described_class
+
+ describe '.to_exclusive_sentence' do
+ it 'calls #to_sentence on the array' do
+ array = double
+
+ expect(array).to receive(:to_sentence)
+
+ to_exclusive_sentence(array)
+ end
+
+ it 'joins arrays with two elements correctly' do
+ array = %w[foo bar]
+
+ expect(to_exclusive_sentence(array)).to eq('foo or bar')
+ end
+
+ it 'joins arrays with more than two elements correctly' do
+ array = %w[foo bar baz]
+
+ expect(to_exclusive_sentence(array)).to eq('foo, bar, or baz')
+ end
+
+ it 'localizes the connector words' do
+ array = %w[foo bar baz]
+
+ expect(described_class).to receive(:_).with(' or ').and_return(' <1> ')
+ expect(described_class).to receive(:_).with(', or ').and_return(', <2> ')
+ expect(to_exclusive_sentence(array)).to eq('foo, bar, <2> baz')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 4b589dc43af..1c23a619b38 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -424,6 +424,22 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
end
+
+ context 'when the job is deferred' do
+ it 'logs start and end of job with deferred job_status' do
+ travel_to(timestamp) do
+ expect(logger).to receive(:info).with(start_payload).ordered
+ expect(logger).to receive(:info).with(deferred_payload).ordered
+ expect(subject).to receive(:log_job_start).and_call_original
+ expect(subject).to receive(:log_job_done).and_call_original
+
+ call_subject(job, 'test_queue') do
+ job['deferred'] = true
+ job['deferred_by'] = :feature_flag
+ end
+ end
+ end
+ end
end
describe '#add_time_keys!' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb b/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb
new file mode 100644
index 00000000000..195a79c22ec
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::DeferJobs, feature_category: :scalability do
+ let(:job) { { 'jid' => 123, 'args' => [456] } }
+ let(:queue) { 'test_queue' }
+ let(:deferred_worker) do
+ Class.new do
+ def self.name
+ 'TestDeferredWorker'
+ end
+ include ApplicationWorker
+ end
+ end
+
+ let(:undeferred_worker) do
+ Class.new do
+ def self.name
+ 'UndeferredWorker'
+ end
+ include ApplicationWorker
+ end
+ end
+
+ subject { described_class.new }
+
+ before do
+ stub_const('TestDeferredWorker', deferred_worker)
+ stub_const('UndeferredWorker', undeferred_worker)
+ end
+
+ describe '#call' do
+ context 'with worker not opted for database health check' do
+ context 'when sidekiq_defer_jobs feature flag is enabled for a worker' do
+ before do
+ stub_feature_flags("defer_sidekiq_jobs_#{TestDeferredWorker.name}": true)
+ stub_feature_flags("defer_sidekiq_jobs_#{UndeferredWorker.name}": false)
+ end
+
+ context 'for the affected worker' do
+ it 'defers the job' do
+ expect(TestDeferredWorker).to receive(:perform_in).with(described_class::DELAY, *job['args'])
+ expect { |b| subject.call(TestDeferredWorker.new, job, queue, &b) }.not_to yield_control
+ end
+ end
+
+ context 'for other workers' do
+ it 'runs the job normally' do
+ expect { |b| subject.call(UndeferredWorker.new, job, queue, &b) }.to yield_control
+ end
+ end
+
+ it 'increments the counter' do
+ subject.call(TestDeferredWorker.new, job, queue)
+
+ counter = ::Gitlab::Metrics.registry.get(:sidekiq_jobs_deferred_total)
+ expect(counter.get({ worker: "TestDeferredWorker" })).to eq(1)
+ end
+ end
+
+ context 'when sidekiq_defer_jobs feature flag is disabled' do
+ before do
+ stub_feature_flags("defer_sidekiq_jobs_#{TestDeferredWorker.name}": false)
+ stub_feature_flags("defer_sidekiq_jobs_#{UndeferredWorker.name}": false)
+ end
+
+ it 'runs the job normally' do
+ expect { |b| subject.call(TestDeferredWorker.new, job, queue, &b) }.to yield_control
+ expect { |b| subject.call(UndeferredWorker.new, job, queue, &b) }.to yield_control
+ end
+ end
+ end
+
+ context 'with worker opted for database health check' do
+ let(:health_signal_attrs) { { gitlab_schema: :gitlab_main, delay: 1.minute, tables: [:users] } }
+
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add described_class
+ Sidekiq::Testing.inline! { example.run }
+ end
+ end
+
+ before do
+ stub_feature_flags("defer_sidekiq_jobs_#{TestDeferredWorker.name}": false)
+
+ TestDeferredWorker.defer_on_database_health_signal(*health_signal_attrs.values)
+ end
+
+ context 'without any stop signal from database health check' do
+ it 'runs the job normally' do
+ expect { |b| subject.call(TestDeferredWorker.new, job, queue, &b) }.to yield_control
+ end
+ end
+
+ context 'with stop signal from database health check' do
+ before do
+ stop_signal = instance_double("Gitlab::Database::HealthStatus::Signals::Stop", stop?: true)
+ allow(Gitlab::Database::HealthStatus).to receive(:evaluate).and_return([stop_signal])
+ end
+
+ it 'defers the job by set time' do
+ expect(TestDeferredWorker).to receive(:perform_in).with(health_signal_attrs[:delay], *job['args'])
+
+ TestDeferredWorker.perform_async(*job['args'])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 965ca612b3f..f04ada688d5 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -399,7 +399,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
with_sidekiq_server_middleware do |chain|
Gitlab::SidekiqMiddleware.server_configurator(
metrics: true,
- arguments_logger: false
+ arguments_logger: false,
+ defer_jobs: false
).call(chain)
Sidekiq::Testing.inline! { example.run }
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index af9075f5aa0..7e53b6598b6 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe Gitlab::SidekiqMiddleware do
shared_examples "a middleware chain" do
before do
configurator.call(chain)
+ stub_feature_flags("defer_sidekiq_jobs_#{worker_class.name}": false) # prevent this worker from deferring its jobs
end
-
it "passes through the right middlewares", :aggregate_failures do
enabled_sidekiq_middlewares.each do |middleware|
expect_next_instances_of(middleware, 1, true) do |middleware_instance|
@@ -69,7 +69,8 @@ RSpec.describe Gitlab::SidekiqMiddleware do
::Gitlab::SidekiqStatus::ServerMiddleware,
::Gitlab::SidekiqMiddleware::WorkerContext::Server,
::Gitlab::SidekiqMiddleware::DuplicateJobs::Server,
- ::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
+ ::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware,
+ ::Gitlab::SidekiqMiddleware::DeferJobs
]
end
@@ -78,7 +79,10 @@ RSpec.describe Gitlab::SidekiqMiddleware do
with_sidekiq_server_middleware do |chain|
described_class.server_configurator(
metrics: true,
- arguments_logger: true
+ arguments_logger: true,
+ # defer_jobs has to be false because this middleware defers jobs from a worker based on
+ # `worker` type feature flag which is enabled by default in test
+ defer_jobs: false
).call(chain)
Sidekiq::Testing.inline! { example.run }
@@ -110,14 +114,16 @@ RSpec.describe Gitlab::SidekiqMiddleware do
let(:configurator) do
described_class.server_configurator(
metrics: false,
- arguments_logger: false
+ arguments_logger: false,
+ defer_jobs: false
)
end
let(:disabled_sidekiq_middlewares) do
[
Gitlab::SidekiqMiddleware::ServerMetrics,
- Gitlab::SidekiqMiddleware::ArgumentsLogger
+ Gitlab::SidekiqMiddleware::ArgumentsLogger,
+ Gitlab::SidekiqMiddleware::DeferJobs
]
end
diff --git a/spec/lib/gitlab/silent_mode_spec.rb b/spec/lib/gitlab/silent_mode_spec.rb
new file mode 100644
index 00000000000..bccf7033121
--- /dev/null
+++ b/spec/lib/gitlab/silent_mode_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SilentMode, feature_category: :geo_replication do
+ before do
+ stub_application_setting(silent_mode_enabled: silent_mode)
+ end
+
+ describe '.enabled?' do
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it { expect(described_class.enabled?).to be_truthy }
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it { expect(described_class.enabled?).to be_falsey }
+ end
+ end
+
+ describe '.log_info' do
+ let(:log_args) do
+ {
+ message: 'foo',
+ bar: 'baz'
+ }
+ end
+
+ let(:expected_log_args) { log_args.merge(silent_mode_enabled: silent_mode) }
+
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(expected_log_args)
+
+ described_class.log_info(log_args)
+ end
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(expected_log_args)
+
+ described_class.log_info(log_args)
+ end
+
+ it 'overwrites silent_mode_enabled log key if call already contains it' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(expected_log_args)
+
+ described_class.log_info(log_args.merge(silent_mode_enabled: 'foo'))
+ end
+ end
+ end
+
+ describe '.log_debug' do
+ let(:log_args) do
+ {
+ message: 'foo',
+ bar: 'baz'
+ }
+ end
+
+ let(:expected_log_args) { log_args.merge(silent_mode_enabled: silent_mode) }
+
+ context 'when silent mode is enabled' do
+ let(:silent_mode) { true }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:debug).with(expected_log_args)
+
+ described_class.log_debug(log_args)
+ end
+ end
+
+ context 'when silent mode is disabled' do
+ let(:silent_mode) { false }
+
+ it 'logs to AppJsonLogger and adds the current state of silent mode' do
+ expect(Gitlab::AppJsonLogger).to receive(:debug).with(expected_log_args)
+
+ described_class.log_debug(log_args)
+ end
+
+ it 'overwrites silent_mode_enabled log key if call already contains it' do
+ expect(Gitlab::AppJsonLogger).to receive(:debug).with(expected_log_args)
+
+ described_class.log_debug(log_args.merge(silent_mode_enabled: 'foo'))
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb b/spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb
new file mode 100644
index 00000000000..9f891ceacbf
--- /dev/null
+++ b/spec/lib/gitlab/slash_commands/incident_management/incident_new_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SlashCommands::IncidentManagement::IncidentNew, feature_category: :incident_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:chat_name) { create(:chat_name, user: user) }
+ let_it_be(:regex_match) { described_class.match('incident declare') }
+
+ subject do
+ described_class.new(project, chat_name)
+ end
+
+ describe '#execute' do
+ before do
+ allow_next_instance_of(
+ Integrations::SlackInteractions::IncidentManagement::IncidentModalOpenedService
+ ) do |modal_service|
+ allow(modal_service).to receive(:execute).and_return(
+ ServiceResponse.success(message: 'Please fill the incident creation form.')
+ )
+ end
+ end
+
+ context 'when invoked' do
+ it 'sends ephemeral response' do
+ response = subject.execute(regex_match)
+
+ expect(response[:response_type]).to be(:ephemeral)
+ expect(response[:text]).to eq('Please fill the incident creation form.')
+ end
+ end
+ end
+
+ describe '#allowed?' do
+ it 'returns true' do
+ expect(described_class).to be_allowed(project, user)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(incident_declare_slash_command: false)
+ end
+
+ it 'returns false in allowed?' do
+ expect(described_class).not_to be_allowed(project, user)
+ end
+ end
+ end
+
+ describe '#collection' do
+ context 'when collection method is called' do
+ it 'calls IssuesFinder' do
+ expect_next_instance_of(IssuesFinder) do |finder|
+ expect(finder).to receive(:execute)
+ end
+
+ subject.collection
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
index ba07da51fb4..080c2803ddd 100644
--- a/spec/lib/gitlab/spamcheck/client_spec.rb
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -107,6 +107,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
before do
allow(generic_spammable).to receive_messages(
+ spammable_entity_type: 'generic',
spammable_text: 'generic spam',
created_at: generic_created_at,
updated_at: generic_updated_at,
@@ -127,6 +128,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
expect(issue_pb.updated_at).to eq timestamp_to_protobuf_timestamp(issue.updated_at)
expect(issue_pb.action).to be ::Spamcheck::Action.lookup(::Spamcheck::Action::CREATE)
expect(issue_pb.user.username).to eq user.username
+ expect(issue_pb).not_to receive(:type)
end
it 'builds the expected snippet protobuf object' do
@@ -142,6 +144,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
expect(snippet_pb.user.username).to eq user.username
expect(snippet_pb.files.first.path).to eq 'first.rb'
expect(snippet_pb.files.last.path).to eq 'second.rb'
+ expect(snippet_pb).not_to receive(:type)
end
it 'builds the expected generic protobuf object' do
@@ -149,6 +152,7 @@ RSpec.describe Gitlab::Spamcheck::Client, feature_category: :instance_resiliency
generic_pb, _ = described_class.new.send(:build_protobuf, spammable: generic_spammable, user: user, context: cxt, extra_features: {})
expect(generic_pb.text).to eq 'generic spam'
+ expect(generic_pb.type).to eq 'generic'
expect(generic_pb.created_at).to eq timestamp_to_protobuf_timestamp(generic_created_at)
expect(generic_pb.updated_at).to eq timestamp_to_protobuf_timestamp(generic_updated_at)
expect(generic_pb.action).to be ::Spamcheck::Action.lookup(::Spamcheck::Action::CREATE)
diff --git a/spec/lib/gitlab/task_helpers_spec.rb b/spec/lib/gitlab/task_helpers_spec.rb
new file mode 100644
index 00000000000..0c43dd15e8c
--- /dev/null
+++ b/spec/lib/gitlab/task_helpers_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+class TestHelpersTest
+ include Gitlab::TaskHelpers
+end
+
+RSpec.describe Gitlab::TaskHelpers do
+ subject { TestHelpersTest.new }
+
+ let(:repo) { 'https://gitlab.com/gitlab-org/gitlab-test.git' }
+ let(:clone_path) { Rails.root.join('tmp/tests/task_helpers_tests').to_s }
+ let(:version) { '1.1.0' }
+ let(:tag) { 'v1.1.0' }
+
+ describe '#checkout_or_clone_version' do
+ before do
+ allow(subject).to receive(:run_command!)
+ end
+
+ it 'checkout the version and reset to it' do
+ expect(subject).to receive(:get_version).with(version).and_call_original
+ expect(subject).to receive(:checkout_version).with(tag, clone_path)
+
+ subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path)
+ end
+
+ context "target_dir doesn't exist" do
+ it 'clones the repo' do
+ expect(subject).to receive(:clone_repo).with(repo, clone_path, clone_opts: [])
+
+ subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path)
+ end
+ end
+
+ context 'target_dir exists' do
+ before do
+ expect(Dir).to receive(:exist?).and_return(true)
+ end
+
+ it "doesn't clone the repository" do
+ expect(subject).not_to receive(:clone_repo)
+
+ subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path)
+ end
+ end
+
+ it 'accepts clone_opts' do
+ expect(subject).to receive(:clone_repo).with(repo, clone_path, clone_opts: %w[--depth 1])
+
+ subject.checkout_or_clone_version(version: version, repo: repo, target_dir: clone_path, clone_opts: %w[--depth 1])
+ end
+ end
+
+ describe '#clone_repo' do
+ it 'clones the repo in the target dir' do
+ expect(subject)
+ .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} clone -- #{repo} #{clone_path}])
+
+ subject.clone_repo(repo, clone_path)
+ end
+
+ it 'accepts clone_opts' do
+ expect(subject)
+ .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} clone --depth 1 -- #{repo} #{clone_path}])
+
+ subject.clone_repo(repo, clone_path, clone_opts: %w[--depth 1])
+ end
+ end
+
+ describe '#checkout_version' do
+ it 'checks out the version in the target dir' do
+ expect(subject)
+ .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} config protocol.version 2])
+ expect(subject)
+ .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} fetch --quiet origin #{tag}])
+ expect(subject)
+ .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} checkout -f --quiet FETCH_HEAD --])
+
+ subject.checkout_version(tag, clone_path)
+ end
+ end
+
+ describe '#run_command' do
+ it 'runs command and returns the output' do
+ expect(subject.run_command(%w(echo it works!))).to eq("it works!\n")
+ end
+
+ it 'returns empty string when command does not exist' do
+ expect(subject.run_command(%w(nonexistentcommand with arguments))).to eq('')
+ end
+ end
+
+ describe '#run_command!' do
+ it 'runs command and returns the output' do
+ expect(subject.run_command!(%w(echo it works!))).to eq("it works!\n")
+ end
+
+ it 'raises an exception when command exits with non zero code' do
+ expect { subject.run_command!(['bash', '-c', 'exit 1']) }.to raise_error Gitlab::TaskFailedError
+ end
+ end
+
+ describe '#get_version' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:version, :result) do
+ '1.1.1' | 'v1.1.1'
+ 'master' | 'master'
+ '12.4.0-rc7' | 'v12.4.0-rc7'
+ '594c3ea3e0e5540e5915bd1c49713a0381459dd6' | '594c3ea3e0e5540e5915bd1c49713a0381459dd6'
+ end
+
+ with_them do
+ it { expect(subject.get_version(version)).to eq(result) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb b/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb
deleted file mode 100644
index 4c2b3dea600..00000000000
--- a/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Template::MetricsDashboardTemplate do
- subject { described_class }
-
- describe '.all' do
- it 'combines the globals and rest' do
- all = subject.all.map(&:name)
-
- expect(all).to include('Default')
- end
- end
-
- describe '#content' do
- it 'loads the full file' do
- example_dashboard = subject.new(Rails.root.join('lib/gitlab/metrics/templates/Default.metrics-dashboard.yml'))
-
- expect(example_dashboard.name).to eq 'Default'
- expect(example_dashboard.content).to start_with('#')
- end
- end
-
- it_behaves_like 'file template shared examples', 'Default', '.metrics-dashboard.yml'
-end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index a353a3a512c..f3e27c72143 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -267,7 +267,7 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
allow(YAML).to receive(:load_file).with(Rails.root.join('config/events/filename.yml')).and_return(test_definition)
end
- it 'dispatchs the data to .event' do
+ it 'dispatches the data to .event' do
project = build_stubbed(:project)
user = build_stubbed(:user)
@@ -317,4 +317,18 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
expect(described_class).not_to be_snowplow_micro_enabled
end
end
+
+ describe 'tracker' do
+ it 'returns a SnowPlowMicro instance in development' do
+ allow(Rails.env).to receive(:development?).and_return(true)
+
+ expect(described_class.tracker).to be_an_instance_of(Gitlab::Tracking::Destinations::SnowplowMicro)
+ end
+
+ it 'returns a SnowPlow instance when not in development' do
+ allow(Rails.env).to receive(:development?).and_return(false)
+
+ expect(described_class.tracker).to be_an_instance_of(Gitlab::Tracking::Destinations::Snowplow)
+ end
+ end
end
diff --git a/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
index c56e5ce4e7a..3c1c30fc052 100644
--- a/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
+++ b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry, feature_category: :integrations do
+RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry, feature_category: :shared do
let(:ipv4) { IPAddr.new('192.168.1.1') }
describe '#initialize' do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb
new file mode 100644
index 00000000000..93814436395
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_all_ci_builds_metric_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountAllCiBuildsMetric, feature_category: :continuous_integration do
+ before do
+ create(:ci_build, created_at: 5.days.ago)
+ create(:ci_build, created_at: 1.year.ago)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' } do
+ let(:expected_value) { 2 }
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' } do
+ let(:expected_value) { 1 }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb
new file mode 100644
index 00000000000..538be7bbdc4
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_deployments_metric_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountDeploymentsMetric, feature_category: :service_ping do
+ using RSpec::Parameterized::TableSyntax
+
+ before(:all) do
+ env = create(:environment)
+ [3, 60].each do |n|
+ deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
+ create(:deployment, :failed, deployment_options)
+ create(:deployment, :success, deployment_options)
+ create(:deployment, :success, deployment_options)
+ end
+ end
+
+ where(:type, :time_frame, :expected_value) do
+ :all | 'all' | 6
+ :all | '28d' | 3
+ :success | 'all' | 4
+ :success | '28d' | 2
+ :failed | 'all' | 2
+ :failed | '28d' | 1
+ end
+
+ with_them do
+ expected_value = params[:expected_value] # rubocop: disable Lint/UselessAssignment
+ time_frame = params[:time_frame]
+ type = params[:type]
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: time_frame, options: { type: type } }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb
new file mode 100644
index 00000000000..cfd2fcabae6
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_personal_snippets_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountPersonalSnippetsMetric, feature_category: :service_ping do
+ before_all do
+ create(:personal_snippet, created_at: 5.days.ago)
+ create(:personal_snippet, created_at: 1.year.ago)
+ end
+
+ context 'with a time_frame of 28 days' do
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }
+ end
+
+ context 'with a timeframe of all' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb
new file mode 100644
index 00000000000..a82726ccf44
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_project_snippets_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectSnippetsMetric, feature_category: :service_ping do
+ before_all do
+ create(:project_snippet, created_at: 5.days.ago)
+ create(:project_snippet, created_at: 1.year.ago)
+ end
+
+ context 'with a time_frame of 28 days' do
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }
+ end
+
+ context 'with a timeframe of all' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb
new file mode 100644
index 00000000000..85d04a6e31b
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_alerts_created_metric_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectsWithAlertsCreatedMetric, feature_category: :service_ping do
+ before do
+ project = create(:project)
+ create(:alert_management_alert, project: project, created_at: 5.days.ago)
+ create(:alert_management_alert, project: project, created_at: 10.days.ago)
+ create(:alert_management_alert, created_at: 1.year.ago)
+ end
+
+ context 'with 28d timeframe' do
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d' }
+ end
+
+ context 'with all timeframe' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb
new file mode 100644
index 00000000000..daacea83833
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_snippets_metric_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountSnippetsMetric, feature_category: :service_ping do
+ before_all do
+ create(:personal_snippet, created_at: 5.days.ago)
+ create(:personal_snippet, created_at: 1.year.ago)
+
+ create(:project_snippet, created_at: 1.year.ago)
+ create(:project_snippet, created_at: 5.days.ago)
+ end
+
+ context 'with a time_frame of 28 days' do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }
+ end
+
+ context 'with a timeframe of all' do
+ let(:expected_value) { 4 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb
deleted file mode 100644
index ff6be56c13f..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/installation_creation_date_metric_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InstallationCreationDateMetric,
- feature_category: :service_ping do
- context 'with a root user' do
- let_it_be(:root) { create(:user, id: 1) }
- let_it_be(:expected_value) { root.reload.created_at } # reloading to get the timestamp from the database
-
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
- end
-
- context 'without a root user' do
- let_it_be(:another_user) { create(:user, id: 2) }
- let_it_be(:expected_value) { nil }
-
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index 271e9595703..5002ee7599f 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -68,6 +68,10 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator, feature_cate
end
context 'for add metrics' do
+ before do
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/issues/414887'
+ end
+
it_behaves_like 'name suggestion' do
# corresponding metric is collected with add(data[:personal_snippets], data[:project_snippets])
let(:key_path) { 'counts.snippets' }
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index f1ce48468fe..a848c286fa9 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -120,9 +120,9 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c
# Because test cases are run inside a transaction, if any query raise and error all queries that follows
# it are automatically canceled by PostgreSQL, to avoid that problem, and to provide exhaustive information
# about every metric, queries are wrapped explicitly in sub transactions.
- table = PgQuery.parse(query).tables.first
- gitlab_schema = Gitlab::Database::GitlabSchema.tables_to_schema[table]
- base_model = gitlab_schema == :gitlab_main ? ApplicationRecord : Ci::ApplicationRecord
+ table_name = PgQuery.parse(query).tables.first
+ gitlab_schema = Gitlab::Database::GitlabSchema.table_schema!(table_name)
+ base_model = Gitlab::Database.schemas_to_base_models.fetch(gitlab_schema).first
base_model.transaction do
base_model.connection.execute(query)&.first&.values&.first
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index b962757c35b..50fb9f9df6e 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -23,91 +23,10 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
described_class.clear_memoization(:known_events)
end
- describe '.track_event' do
- # ToDo: remove during https://gitlab.com/groups/gitlab-org/-/epics/9542 cleanup
- describe 'daily to weekly key migration precautions' do
- let(:event_a_name) { 'example_event_a' }
- let(:event_b_name) { 'example_event_b' }
- let(:known_events) do
- [
- { name: event_a_name, aggregation: 'daily' },
- { name: event_b_name, aggregation: 'weekly' }
- ].map(&:with_indifferent_access)
- end
-
- let(:start_date) { (Date.current - 1.week).beginning_of_week }
- let(:end_date) { Date.current }
-
- let(:daily_event) { known_events.first }
- let(:daily_key) { described_class.send(:redis_key, daily_event, start_date) }
- let(:weekly_key) do
- weekly_event = known_events.first.merge(aggregation: 'weekly')
- described_class.send(:redis_key, weekly_event, start_date)
- end
-
- before do
- allow(described_class).to receive(:load_events).with(described_class::KNOWN_EVENTS_PATH).and_return(known_events)
- allow(described_class).to receive(:load_events).with(/ee/).and_return([])
- end
-
- shared_examples 'writes daily events to daily and weekly keys' do
- it :aggregate_failures do
- expect(Gitlab::Redis::HLL).to receive(:add).with(expiry: 29.days, key: daily_key, value: 1).and_call_original
- expect(Gitlab::Redis::HLL).to receive(:add).with(expiry: 6.weeks, key: weekly_key, value: 1).and_call_original
-
- described_class.track_event(event_a_name, values: 1, time: start_date)
- end
- end
-
- context 'when revert_daily_hll_events_to_weekly_aggregation FF is disabled' do
- before do
- stub_feature_flags(revert_daily_hll_events_to_weekly_aggregation: false)
- end
-
- it_behaves_like 'writes daily events to daily and weekly keys'
-
- it 'aggregates weekly for daily keys', :aggregate_failures do
- expect(Gitlab::Redis::HLL).to receive(:count).with(keys: [weekly_key]).and_call_original
- expect(Gitlab::Redis::HLL).not_to receive(:count).with(keys: [daily_key]).and_call_original
-
- described_class.unique_events(event_names: [event_a_name], start_date: start_date, end_date: end_date)
- end
-
- it 'does not persists changes to event aggregation attribute' do
- described_class.unique_events(event_names: [event_a_name], start_date: start_date, end_date: end_date)
-
- expect(described_class.known_events.find { |e| e[:name] == event_a_name }[:aggregation])
- .to eql 'daily'
- end
- end
-
- context 'when revert_daily_hll_events_to_weekly_aggregation FF is enabled' do
- before do
- stub_feature_flags(revert_daily_hll_events_to_weekly_aggregation: true)
- end
-
- # we want to write events no matter of the feature state
- it_behaves_like 'writes daily events to daily and weekly keys'
-
- it 'aggregates daily for daily keys', :aggregate_failures do
- expect(Gitlab::Redis::HLL).to receive(:count).with(keys: [daily_key]).and_call_original
- expect(Gitlab::Redis::HLL).not_to receive(:count).with(keys: [weekly_key]).and_call_original
-
- described_class.unique_events(event_names: [event_a_name], start_date: start_date, end_date: start_date)
- end
- end
- end
- end
-
describe '.known_events' do
let(:ce_temp_dir) { Dir.mktmpdir }
let(:ce_temp_file) { Tempfile.new(%w[common .yml], ce_temp_dir) }
- let(:ce_event) do
- {
- "name" => "ce_event",
- "aggregation" => "weekly"
- }
- end
+ let(:ce_event) { { "name" => "ce_event" } }
before do
stub_const("#{described_class}::KNOWN_EVENTS_PATH", File.expand_path('*.yml', ce_temp_dir))
@@ -144,13 +63,13 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:known_events) do
[
- { name: weekly_event, aggregation: "weekly" },
- { name: daily_event, aggregation: "daily" },
- { name: category_productivity_event, aggregation: "weekly" },
- { name: compliance_slot_event, aggregation: "weekly" },
- { name: no_slot, aggregation: "daily" },
- { name: different_aggregation, aggregation: "monthly" },
- { name: context_event, aggregation: 'weekly' }
+ { name: weekly_event },
+ { name: daily_event },
+ { name: category_productivity_event },
+ { name: compliance_slot_event },
+ { name: no_slot },
+ { name: different_aggregation },
+ { name: context_event }
].map(&:with_indifferent_access)
end
@@ -203,15 +122,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'tracks events with multiple values' do
values = [entity1, entity2]
expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_analytics_contribution/, value: values,
- expiry: described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH)
+ expiry: described_class::KEY_EXPIRY_LENGTH)
described_class.track_event(:g_analytics_contribution, values: values)
end
- it "raise error if metrics don't have same aggregation" do
- expect { described_class.track_event(different_aggregation, values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
- end
-
it 'raise error if metrics of unknown event' do
expect { described_class.track_event('unknown', values: entity1, time: Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
@@ -248,22 +163,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
expect(keys).not_to be_empty
keys.each do |key|
- expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH)
- end
- end
- end
- end
-
- context 'for daily events' do
- it 'sets the keys in Redis to expire' do
- described_class.track_event("no_slot", values: entity1)
-
- Gitlab::Redis::SharedState.with do |redis|
- keys = redis.scan_each(match: "*_no_slot").to_a
- expect(keys).not_to be_empty
-
- keys.each do |key|
- expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::DEFAULT_DAILY_KEY_EXPIRY_LENGTH)
+ expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::KEY_EXPIRY_LENGTH)
end
end
end
@@ -285,7 +185,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
values = [entity1, entity2]
expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_analytics_contribution/,
value: values,
- expiry: described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH)
+ expiry: described_class::KEY_EXPIRY_LENGTH)
described_class.track_event_in_context(:g_analytics_contribution, values: values, context: default_context)
end
@@ -347,12 +247,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
expect(described_class.unique_events(event_names: [weekly_event], start_date: Date.current, end_date: 4.weeks.ago)).to eq(-1)
end
- it "raise error if metrics don't have same aggregation" do
- expect do
- described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current)
- end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::AggregationMismatch)
- end
-
context 'when data for the last complete week' do
it { expect(described_class.unique_events(event_names: [weekly_event], start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
end
@@ -369,12 +263,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it { expect(described_class.unique_events(event_names: [weekly_event.to_sym], start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
end
- context 'when using daily aggregation' do
- it { expect(described_class.unique_events(event_names: [daily_event], start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
- it { expect(described_class.unique_events(event_names: [daily_event], start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
- it { expect(described_class.unique_events(event_names: [daily_event], start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
- end
-
context 'when no slot is set' do
it { expect(described_class.unique_events(event_names: [no_slot], start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
end
@@ -388,7 +276,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- describe '.weekly_redis_keys' do
+ describe '.keys_for_aggregation' do
using RSpec::Parameterized::TableSyntax
let(:weekly_event) { 'i_search_total' }
@@ -398,7 +286,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:week_three) { "{#{described_class::REDIS_SLOT}}_i_search_total-2021-01" }
let(:week_four) { "{#{described_class::REDIS_SLOT}}_i_search_total-2021-02" }
- subject(:weekly_redis_keys) { described_class.send(:weekly_redis_keys, events: [redis_event], start_date: DateTime.parse(start_date), end_date: DateTime.parse(end_date)) }
+ subject(:keys_for_aggregation) { described_class.send(:keys_for_aggregation, events: [redis_event], start_date: DateTime.parse(start_date), end_date: DateTime.parse(end_date)) }
where(:start_date, :end_date, :keys) do
'2020-12-21' | '2020-12-21' | []
@@ -421,11 +309,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
it 'returns 1 key for last for week' do
- expect(described_class.send(:weekly_redis_keys, events: [redis_event], start_date: 7.days.ago.to_date, end_date: Date.current).size).to eq 1
+ expect(described_class.send(:keys_for_aggregation, events: [redis_event], start_date: 7.days.ago.to_date, end_date: Date.current).size).to eq 1
end
it 'returns 4 key for last for weeks' do
- expect(described_class.send(:weekly_redis_keys, events: [redis_event], start_date: 4.weeks.ago.to_date, end_date: Date.current).size).to eq 4
+ expect(described_class.send(:keys_for_aggregation, events: [redis_event], start_date: 4.weeks.ago.to_date, end_date: Date.current).size).to eq 4
end
end
@@ -434,9 +322,9 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:known_events) do
[
- { name: 'event_name_1', aggregation: "weekly" },
- { name: 'event_name_2', aggregation: "weekly" },
- { name: 'event_name_3', aggregation: "weekly" }
+ { name: 'event_name_1' },
+ { name: 'event_name_2' },
+ { name: 'event_name_3' }
].map(&:with_indifferent_access)
end
@@ -475,11 +363,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:time_range) { { start_date: 7.days.ago, end_date: DateTime.current } }
let(:known_events) do
[
- { name: 'event1_slot', aggregation: "weekly" },
- { name: 'event2_slot', aggregation: "weekly" },
- { name: 'event3_slot', aggregation: "weekly" },
- { name: 'event5_slot', aggregation: "daily" },
- { name: 'event4', aggregation: "weekly" }
+ { name: 'event1_slot' },
+ { name: 'event2_slot' },
+ { name: 'event3_slot' },
+ { name: 'event5_slot' },
+ { name: 'event4' }
].map(&:with_indifferent_access)
end
diff --git a/spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..e034f04ff92
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/jetbrains_bundled_plugin_activity_unique_counter_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::JetBrainsBundledPluginActivityUniqueCounter, :clean_gitlab_redis_shared_state, feature_category: :editor_extensions do # rubocop:disable RSpec/FilePath
+ let(:user1) { build(:user, id: 1) }
+ let(:user2) { build(:user, id: 2) }
+ let(:time) { Time.current }
+ let(:action) { described_class::JETBRAINS_BUNDLED_API_REQUEST_ACTION }
+ let(:user_agent_string) do
+ 'IntelliJ-GitLab-Plugin PhpStorm/PS-232.6734.11 (JRE 17.0.7+7-b966.2; Linux 6.2.0-20-generic; amd64)'
+ end
+
+ let(:user_agent) { { user_agent: user_agent_string } }
+
+ context 'when tracking a jetbrains bundled api request' do
+ it_behaves_like 'a request from an extension'
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
index ced9ec7f221..42855271e22 100644
--- a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
let(:events) do
{
'gitops_sync' => 1,
- 'k8s_api_proxy_request' => 2
+ 'k8s_api_proxy_request' => 2,
+ 'flux_git_push_notifications_total' => 3
}
end
@@ -23,7 +24,10 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
described_class.increment_event_counts(events)
described_class.increment_event_counts(events)
- expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 3, kubernetes_agent_k8s_api_proxy_request: 6)
+ expect(described_class.totals).to eq(
+ kubernetes_agent_gitops_sync: 3,
+ kubernetes_agent_k8s_api_proxy_request: 6,
+ kubernetes_agent_flux_git_push_notifications_total: 9)
end
context 'with empty events' do
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index e41da6d9ea2..25c57aa00c6 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -54,6 +54,11 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:merge_request) { create(:merge_request) }
let(:target_project) { merge_request.target_project }
+ let(:fake_tracker) { instance_spy(Gitlab::Tracking::Destinations::Snowplow) }
+
+ before do
+ allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_tracker)
+ end
it_behaves_like 'a tracked merge request unique event' do
let(:action) { described_class::MR_USER_CREATE_ACTION }
@@ -63,14 +68,36 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:action) { described_class::MR_CREATE_ACTION }
end
- it_behaves_like 'Snowplow event tracking with RedisHLL context' do
- let(:action) { :create }
- let(:category) { described_class.name }
- let(:project) { target_project }
- let(:namespace) { project.namespace.reload }
- let(:user) { project.creator }
- let(:label) { 'redis_hll_counters.code_review.i_code_review_user_create_mr_monthly' }
- let(:property) { described_class::MR_USER_CREATE_ACTION }
+ it 'logs to Snowplow', :aggregate_failures do
+ # This logic should be extracted to shared_examples
+ namespace = target_project.namespace
+
+ expect(Gitlab::Tracking::StandardContext)
+ .to receive(:new)
+ .with(
+ project_id: target_project.id,
+ user_id: user.id,
+ namespace_id: namespace.id,
+ plan_name: namespace.actual_plan_name
+ )
+ .and_call_original
+
+ expect(Gitlab::Tracking::ServicePingContext)
+ .to receive(:new)
+ .with(data_source: :redis_hll, event: described_class::MR_USER_CREATE_ACTION)
+ .and_call_original
+
+ expect(fake_tracker).to receive(:event)
+ .with(
+ 'InternalEventTracking',
+ described_class::MR_USER_CREATE_ACTION,
+ context: [
+ an_instance_of(SnowplowTracker::SelfDescribingJson),
+ an_instance_of(SnowplowTracker::SelfDescribingJson)
+ ]
+ )
+ .exactly(:once)
+ subject
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 4544cb2eb26..9df869f8801 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -356,6 +356,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
create(:project_error_tracking_setting)
create(:incident)
create(:incident, alert_management_alert: create(:alert_management_alert))
+ create(:issue, alert_management_alert: create(:alert_management_alert))
create(:alert_management_http_integration, :active, project: project)
end
@@ -365,7 +366,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
operations_dashboard_default_dashboard: 2,
projects_with_error_tracking_enabled: 2,
projects_with_incidents: 4,
- projects_with_alert_incidents: 2,
+ projects_with_alert_incidents: 4,
projects_with_enabled_alert_integrations_histogram: { '1' => 2 }
)
@@ -376,7 +377,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
operations_dashboard_default_dashboard: 1,
projects_with_error_tracking_enabled: 1,
projects_with_incidents: 2,
- projects_with_alert_incidents: 1
+ projects_with_alert_incidents: 2
)
expect(data_28_days).not_to include(:projects_with_enabled_alert_integrations_histogram)
@@ -539,7 +540,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
expect(count_data[:projects_with_enabled_alert_integrations]).to eq(1)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
expect(count_data[:projects_with_terraform_states]).to eq(2)
- expect(count_data[:projects_with_alerts_created]).to eq(1)
expect(count_data[:protected_branches]).to eq(2)
expect(count_data[:protected_branches_except_default]).to eq(1)
expect(count_data[:terraform_reports]).to eq(6)
@@ -568,13 +568,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
expect(count_data[:kubernetes_agents]).to eq(2)
expect(count_data[:kubernetes_agents_with_token]).to eq(1)
- expect(count_data[:deployments]).to eq(4)
- expect(count_data[:successful_deployments]).to eq(2)
- expect(count_data[:failed_deployments]).to eq(2)
expect(count_data[:feature_flags]).to eq(1)
- expect(count_data[:snippets]).to eq(6)
- expect(count_data[:personal_snippets]).to eq(2)
- expect(count_data[:project_snippets]).to eq(4)
expect(count_data[:projects_creating_incidents]).to eq(2)
expect(count_data[:projects_with_packages]).to eq(2)
@@ -626,19 +620,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
let_it_be(:project) { create(:project, created_at: 3.days.ago) }
before do
- env = create(:environment)
create(:package, project: project, created_at: 3.days.ago)
create(:package, created_at: 2.months.ago, project: project)
- [3, 31].each do |n|
- deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
- create(:deployment, :failed, deployment_options)
- create(:deployment, :success, deployment_options)
- create(:project_snippet, project: project, created_at: n.days.ago)
- create(:personal_snippet, created_at: n.days.ago)
- create(:alert_management_alert, project: project, created_at: n.days.ago)
- end
-
for_defined_days_back do
create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
end
@@ -649,26 +633,11 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
it 'gathers monthly usage counts correctly' do
counts_monthly = subject[:counts_monthly]
- expect(counts_monthly[:deployments]).to eq(2)
- expect(counts_monthly[:successful_deployments]).to eq(1)
- expect(counts_monthly[:failed_deployments]).to eq(1)
- expect(counts_monthly[:snippets]).to eq(2)
- expect(counts_monthly[:personal_snippets]).to eq(1)
- expect(counts_monthly[:project_snippets]).to eq(1)
- expect(counts_monthly[:projects_with_alerts_created]).to eq(1)
expect(counts_monthly[:projects]).to eq(1)
expect(counts_monthly[:packages]).to eq(1)
end
end
- describe '.license_usage_data' do
- subject { described_class.license_usage_data }
-
- it 'gathers license data' do
- expect(subject[:recorded_at]).to be_a(Time)
- end
- end
-
context 'when not relying on database records' do
describe '.features_usage_data_ce' do
subject { described_class.features_usage_data_ce }
diff --git a/spec/lib/gitlab/utils/markdown_spec.rb b/spec/lib/gitlab/utils/markdown_spec.rb
index 0a7d1160bbc..45953c7906e 100644
--- a/spec/lib/gitlab/utils/markdown_spec.rb
+++ b/spec/lib/gitlab/utils/markdown_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::Utils::Markdown do
context 'when string has a product suffix' do
%w[CORE STARTER PREMIUM ULTIMATE FREE BRONZE SILVER GOLD].each do |tier|
- ['', ' ONLY', ' SELF', ' SASS'].each do |modifier|
+ ['', ' ONLY', ' SELF', ' SAAS'].each do |modifier|
context "#{tier}#{modifier}" do
let(:string) { "My Header (#{tier}#{modifier})" }
diff --git a/spec/lib/gitlab/utils/sanitize_node_link_spec.rb b/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
index 1fc10bc3aa8..dab3174a4a7 100644
--- a/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
+++ b/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+# TODO: change to fast_spec_helper in scope of https://gitlab.com/gitlab-org/gitlab/-/issues/413779
+require 'spec_helper'
require 'html/pipeline'
require 'addressable'
@@ -27,9 +28,13 @@ RSpec.describe Gitlab::Utils::SanitizeNodeLink do
" &#14; javascript:"
]
- invalid_schemes.each do |scheme|
- context "with the scheme: #{scheme}" do
- describe "#remove_unsafe_links" do
+ describe "#remove_unsafe_links" do
+ subject { object.remove_unsafe_links(env, remove_invalid_links: true) }
+
+ let(:env) { { node: node } }
+
+ invalid_schemes.each do |scheme|
+ context "with the scheme: #{scheme}" do
tags = {
a: {
doc: HTML::Pipeline.parse("<a href='#{scheme}alert(1);'>foo</a>"),
@@ -55,19 +60,55 @@ RSpec.describe Gitlab::Utils::SanitizeNodeLink do
tags.each do |tag, opts|
context "<#{tag}> tags" do
- it "removes the unsafe link" do
- node = opts[:node_to_check].call(opts[:doc])
+ let(:node) { opts[:node_to_check].call(opts[:doc]) }
- expect { object.remove_unsafe_links({ node: node }, remove_invalid_links: true) }
- .to change { node[opts[:attr]] }
+ it "removes the unsafe link" do
+ expect { subject }.to change { node[opts[:attr]] }
expect(node[opts[:attr]]).to be_blank
end
end
end
end
+ end
+
+ context 'when URI is valid' do
+ let(:doc) { HTML::Pipeline.parse("<a href='http://example.com'>foo</a>") }
+ let(:node) { doc.children.first }
+
+ it 'does not remove it' do
+ subject
+
+ expect(node[:href]).to eq('http://example.com')
+ end
+ end
+
+ context 'when URI is invalid' do
+ let(:doc) { HTML::Pipeline.parse("<a href='http://example:wrong_port.com'>foo</a>") }
+ let(:node) { doc.children.first }
+
+ it 'removes the link' do
+ subject
+
+ expect(node[:href]).to be_nil
+ end
+ end
+
+ context 'when URI is encoded but still invalid' do
+ let(:doc) { HTML::Pipeline.parse("<a href='http://example%EF%BC%9A%E7%BD%91'>foo</a>") }
+ let(:node) { doc.children.first }
+
+ it 'removes the link' do
+ subject
- describe "#safe_protocol?" do
+ expect(node[:href]).to be_nil
+ end
+ end
+ end
+
+ describe "#safe_protocol?" do
+ invalid_schemes.each do |scheme|
+ context "with the scheme: #{scheme}" do
let(:doc) { HTML::Pipeline.parse("<a href='#{scheme}alert(1);'>foo</a>") }
let(:node) { doc.children.first }
let(:uri) { Addressable::URI.parse(node['href']) }
@@ -78,4 +119,14 @@ RSpec.describe Gitlab::Utils::SanitizeNodeLink do
end
end
end
+
+ describe '#sanitize_unsafe_links' do
+ let(:env) { { node: 'node' } }
+
+ it 'makes a call to #remove_unsafe_links_method' do
+ expect(object).to receive(:remove_unsafe_links).with(env)
+
+ object.sanitize_unsafe_links(env)
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 586ee04a835..b4672a9d1c4 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -182,7 +182,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'counts over joined relations' do
- expect(described_class.estimate_batch_distinct_count(model.joins(:build), "ci_builds.name")).to eq(ci_builds_estimated_cardinality)
+ expect(described_class.estimate_batch_distinct_count(model.joins(:build), "#{Ci::Build.table_name}.name")).to eq(ci_builds_estimated_cardinality)
end
it 'counts with :column field with batch_size of 50K' do
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 102d608072b..7b9504366ec 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -6,139 +6,9 @@ RSpec.describe Gitlab::Utils do
using RSpec::Parameterized::TableSyntax
delegate :to_boolean, :boolean_to_yes_no, :slugify, :which,
- :ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
- :append_path, :remove_leading_slashes, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!,
- :decode_path, :ms_to_round_sec, :check_allowed_absolute_path_and_path_traversal!, to: :described_class
-
- describe '.check_path_traversal!' do
- it 'detects path traversal in string without any separators' do
- expect { check_path_traversal!('.') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string' do
- expect { check_path_traversal!('../foo') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('..\\foo') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string, even to just the subdirectory' do
- expect { check_path_traversal!('../') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('..\\') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('/../') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('\\..\\') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal in the middle of the string' do
- expect { check_path_traversal!('foo/../../bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\..\\..\\bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo/..\\bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\../bar') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo/..\\..\\..\\..\\../bar') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string when slash-terminates' do
- expect { check_path_traversal!('foo/../') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\..\\') }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string' do
- expect { check_path_traversal!('foo/..') }.to raise_error(/Invalid path/)
- expect { check_path_traversal!('foo\\..') }.to raise_error(/Invalid path/)
- end
-
- it 'does nothing for a safe string' do
- expect(check_path_traversal!('./foo')).to eq('./foo')
- expect(check_path_traversal!('.test/foo')).to eq('.test/foo')
- expect(check_path_traversal!('..test/foo')).to eq('..test/foo')
- expect(check_path_traversal!('dir/..foo.rb')).to eq('dir/..foo.rb')
- expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
- end
-
- it 'logs potential path traversal attempts' do
- expect(Gitlab::AppLogger).to receive(:warn).with(message: "Potential path traversal attempt detected", path: "..")
- expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
- end
-
- it 'logs does nothing for a safe string' do
- expect(Gitlab::AppLogger).not_to receive(:warn).with(message: "Potential path traversal attempt detected", path: "dir/.foo.rb")
- expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
- end
-
- it 'does nothing for nil' do
- expect(check_path_traversal!(nil)).to be_nil
- end
-
- it 'does nothing for safe HashedPath' do
- expect(check_path_traversal!(Gitlab::HashedPath.new('tmp', root_hash: 1))).to eq '6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b/tmp'
- end
-
- it 'raises for unsafe HashedPath' do
- expect { check_path_traversal!(Gitlab::HashedPath.new('tmp', '..', 'etc', 'passwd', root_hash: 1)) }.to raise_error(/Invalid path/)
- end
-
- it 'raises for other non-strings' do
- expect { check_path_traversal!(%w[/tmp /tmp/../etc/passwd]) }.to raise_error(/Invalid path/)
- end
- end
-
- describe '.check_allowed_absolute_path_and_path_traversal!' do
- let(:allowed_paths) { %w[/home/foo ./foo .test/foo ..test/foo dir/..foo.rb dir/.foo.rb] }
-
- it 'detects path traversal in string without any separators' do
- expect { check_allowed_absolute_path_and_path_traversal!('.', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('..', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string' do
- expect { check_allowed_absolute_path_and_path_traversal!('../foo', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('..\\foo', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the start of the string, even to just the subdirectory' do
- expect { check_allowed_absolute_path_and_path_traversal!('../', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('..\\', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('/../', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal in the middle of the string' do
- expect { check_allowed_absolute_path_and_path_traversal!('foo/../../bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo/..\\..\\..\\..\\../bar', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string when slash-terminates' do
- expect { check_allowed_absolute_path_and_path_traversal!('foo/../', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\..\\', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'detects path traversal at the end of the string' do
- expect { check_allowed_absolute_path_and_path_traversal!('foo/..', allowed_paths) }.to raise_error(/Invalid path/)
- expect { check_allowed_absolute_path_and_path_traversal!('foo\\..', allowed_paths) }.to raise_error(/Invalid path/)
- end
-
- it 'does not return errors for a safe string' do
- expect(check_allowed_absolute_path_and_path_traversal!('./foo', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('.test/foo', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('..test/foo', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('dir/..foo.rb', allowed_paths)).to be_nil
- expect(check_allowed_absolute_path_and_path_traversal!('dir/.foo.rb', allowed_paths)).to be_nil
- end
-
- it 'raises error for a non-string' do
- expect { check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths) }.to raise_error(StandardError)
- end
-
- it 'raises an exception if an absolute path is not allowed' do
- expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
- end
-
- it 'does nothing for an allowed absolute path' do
- expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
- end
- end
+ :ensure_array_from_string, :bytes_to_megabytes,
+ :append_path, :remove_leading_slashes, :allowlisted?,
+ :decode_path, :ms_to_round_sec, to: :described_class
describe '.allowlisted?' do
let(:allowed_paths) { ['/home/foo', '/foo/bar', '/etc/passwd'] }
@@ -152,18 +22,6 @@ RSpec.describe Gitlab::Utils do
end
end
- describe '.check_allowed_absolute_path!' do
- let(:allowed_paths) { ['/home/foo'] }
-
- it 'raises an exception if an absolute path is not allowed' do
- expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
- end
-
- it 'does nothing for an allowed absolute path' do
- expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
- end
- end
-
describe '.decode_path' do
it 'returns path unencoded for singled-encoded paths' do
expect(decode_path('%2Fhome%2Fbar%3Fasd%3Dqwe')).to eq('/home/bar?asd=qwe')
@@ -212,36 +70,6 @@ RSpec.describe Gitlab::Utils do
end
end
- describe '.to_exclusive_sentence' do
- it 'calls #to_sentence on the array' do
- array = double
-
- expect(array).to receive(:to_sentence)
-
- to_exclusive_sentence(array)
- end
-
- it 'joins arrays with two elements correctly' do
- array = %w(foo bar)
-
- expect(to_exclusive_sentence(array)).to eq('foo or bar')
- end
-
- it 'joins arrays with more than two elements correctly' do
- array = %w(foo bar baz)
-
- expect(to_exclusive_sentence(array)).to eq('foo, bar, or baz')
- end
-
- it 'localizes the connector words' do
- array = %w(foo bar baz)
-
- expect(described_class).to receive(:_).with(' or ').and_return(' <1> ')
- expect(described_class).to receive(:_).with(', or ').and_return(', <2> ')
- expect(to_exclusive_sentence(array)).to eq('foo, bar, <2> baz')
- end
- end
-
describe '.nlbr' do
it 'replaces new lines with <br>' do
expect(described_class.nlbr("<b>hello</b>\n<i>world</i>")).to eq("hello<br>world")
diff --git a/spec/lib/gitlab/verify/ci_secure_files_spec.rb b/spec/lib/gitlab/verify/ci_secure_files_spec.rb
new file mode 100644
index 00000000000..4fd2db85ec2
--- /dev/null
+++ b/spec/lib/gitlab/verify/ci_secure_files_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Verify::CiSecureFiles, factory_default: :keep, feature_category: :mobile_devops do
+ include GitlabVerifyHelpers
+
+ it_behaves_like 'Gitlab::Verify::BatchVerifier subclass' do
+ let_it_be(:objects) { create_list(:ci_secure_file, 3) }
+ end
+
+ describe '#run_batches' do
+ let_it_be(:project) { create(:project) }
+ let(:failures) { collect_failures }
+ let(:failure) { failures[secure_file] }
+
+ let!(:secure_file) { create(:ci_secure_file, project: project) }
+
+ it 'passes secure_files with the correct file' do
+ expect(failures).to eq({})
+ end
+
+ it 'fails secure_files with a missing file' do
+ FileUtils.rm_f(secure_file.file.path)
+
+ expect(failures.keys).to contain_exactly(secure_file)
+ expect(failure).to include('No such file or directory')
+ expect(failure).to include(secure_file.file.path)
+ end
+
+ it 'fails secure_files with a mismatched checksum' do
+ secure_file.update!(checksum: 'something incorrect')
+
+ expect(failures.keys).to contain_exactly(secure_file)
+ expect(failure).to include('Checksum mismatch')
+ end
+
+ context 'with remote files' do
+ let(:file) { CarrierWaveStringFile.new }
+
+ before do
+ stub_ci_secure_file_object_storage
+ secure_file.update!(file_store: ObjectStorage::Store::REMOTE)
+ end
+
+ describe 'returned hash object' do
+ it 'passes secure_files in object storage that exist' do
+ expect(CarrierWave::Storage::Fog::File).to receive(:new).and_return(file)
+ expect(file).to receive(:exists?).and_return(true)
+
+ expect(failures).to eq({})
+ end
+
+ it 'fails secure_files in object storage that do not exist' do
+ expect(CarrierWave::Storage::Fog::File).to receive(:new).and_return(file)
+ expect(file).to receive(:exists?).and_return(false)
+
+ expect(failures.keys).to contain_exactly(secure_file)
+ expect(failure).to include('Remote object does not exist')
+ end
+ end
+ end
+ end
+end