Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec/lib
diff options
context:
space:
mode:
Diffstat (limited to 'spec/lib')
-rw-r--r--spec/lib/api/ci/helpers/runner_spec.rb2
-rw-r--r--spec/lib/api/entities/ci/job_request/image_spec.rb9
-rw-r--r--spec/lib/api/entities/ci/job_request/service_spec.rb2
-rw-r--r--spec/lib/api/entities/hook_spec.rb25
-rw-r--r--spec/lib/api/entities/ml/mlflow/run_info_spec.rb19
-rw-r--r--spec/lib/api/entities/project_import_status_spec.rb1
-rw-r--r--spec/lib/api/entities/projects/repository_storage_move_spec.rb2
-rw-r--r--spec/lib/api/entities/snippets/repository_storage_move_spec.rb2
-rw-r--r--spec/lib/api/helpers/import_github_helpers_spec.rb3
-rw-r--r--spec/lib/api/ml/mlflow/api_helpers_spec.rb76
-rw-r--r--spec/lib/backup/database_configuration_spec.rb239
-rw-r--r--spec/lib/backup/database_connection_spec.rb103
-rw-r--r--spec/lib/backup/database_model_spec.rb55
-rw-r--r--spec/lib/backup/database_spec.rb127
-rw-r--r--spec/lib/backup/dump/postgres_spec.rb76
-rw-r--r--spec/lib/backup/files_spec.rb56
-rw-r--r--spec/lib/backup/repositories_spec.rb40
-rw-r--r--spec/lib/banzai/filter/custom_emoji_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/markdown_filter_spec.rb16
-rw-r--r--spec/lib/banzai/filter/quick_action_filter_spec.rb37
-rw-r--r--spec/lib/banzai/filter/references/alert_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/commit_reference_filter_spec.rb14
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb12
-rw-r--r--spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/reference_cache_spec.rb12
-rw-r--r--spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb62
-rw-r--r--spec/lib/bitbucket/connection_spec.rb45
-rw-r--r--spec/lib/bitbucket/exponential_backoff_spec.rb62
-rw-r--r--spec/lib/bitbucket/representation/pull_request_spec.rb7
-rw-r--r--spec/lib/bitbucket_server/client_spec.rb21
-rw-r--r--spec/lib/bitbucket_server/connection_spec.rb147
-rw-r--r--spec/lib/bitbucket_server/representation/activity_spec.rb16
-rw-r--r--spec/lib/bitbucket_server/representation/user_spec.rb19
-rw-r--r--spec/lib/bitbucket_server/retry_with_delay_spec.rb60
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb12
-rw-r--r--spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb9
-rw-r--r--spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/logger_spec.rb49
-rw-r--r--spec/lib/bulk_imports/network_error_spec.rb4
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb24
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb263
-rw-r--r--spec/lib/bulk_imports/projects/stage_spec.rb5
-rw-r--r--spec/lib/click_house/connection_spec.rb54
-rw-r--r--spec/lib/click_house/iterator_spec.rb43
-rw-r--r--spec/lib/click_house/migration_support/exclusive_lock_spec.rb140
-rw-r--r--spec/lib/click_house/migration_support/migration_context_spec.rb203
-rw-r--r--spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb61
-rw-r--r--spec/lib/extracts_ref_spec.rb8
-rw-r--r--spec/lib/feature_spec.rb1512
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt4
-rw-r--r--spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt1
-rw-r--r--spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb100
-rw-r--r--spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb213
-rw-r--r--spec/lib/gitlab/access/branch_protection_spec.rb8
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb86
-rw-r--r--spec/lib/gitlab/application_context_spec.rb8
-rw-r--r--spec/lib/gitlab/auth/saml/config_spec.rb35
-rw-r--r--spec/lib/gitlab/auth_spec.rb56
-rw-r--r--spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb76
-rw-r--r--spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb43
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb53
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb1
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb124
-rw-r--r--spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb74
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb80
-rw-r--r--spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb105
-rw-r--r--spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb124
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb23
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb559
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb12
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb9
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb132
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb37
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb65
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb10
-rw-r--r--spec/lib/gitlab/checks/container_moved_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/force_push_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/global_file_size_check_spec.rb5
-rw-r--r--spec/lib/gitlab/checks/lfs_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/lfs_integrity_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/matching_merge_request_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/project_created_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/push_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/push_file_count_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/single_change_access_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/snippet_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/tag_check_spec.rb129
-rw-r--r--spec/lib/gitlab/checks/timed_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/image_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/components/instance_path_spec.rb186
-rw-r--r--spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb58
-rw-r--r--spec/lib/gitlab/ci/config/entry/includes_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb89
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/retry_spec.rb86
-rw-r--r--spec/lib/gitlab/ci/config/entry/service_spec.rb59
-rw-r--r--spec/lib/gitlab/ci/config/entry/workflow_spec.rb110
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/external/file/remote_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb221
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb105
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/jwt_v2_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb73
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb268
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb172
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/source_spec.rb102
-rw-r--r--spec/lib/gitlab/ci/runner_instructions_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/runner_releases_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/runner_upgrade_check_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/templates/templates_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb77
-rw-r--r--spec/lib/gitlab/ci/variables/downstream/generator_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb96
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb110
-rw-r--r--spec/lib/gitlab/circuit_breaker/notifier_spec.rb37
-rw-r--r--spec/lib/gitlab/circuit_breaker/store_spec.rb201
-rw-r--r--spec/lib/gitlab/circuit_breaker_spec.rb120
-rw-r--r--spec/lib/gitlab/contributions_calendar_spec.rb73
-rw-r--r--spec/lib/gitlab/counters/buffered_counter_spec.rb20
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb13
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb14
-rw-r--r--spec/lib/gitlab/database/decomposition/migrate_spec.rb180
-rw-r--r--spec/lib/gitlab/database/dictionary_spec.rb123
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb45
-rw-r--r--spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb31
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb13
-rw-r--r--spec/lib/gitlab/database/migration_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb11
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb16
-rw-r--r--spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb12
-rw-r--r--spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb6
-rw-r--r--spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb33
-rw-r--r--spec/lib/gitlab/database/postgres_index_spec.rb9
-rw-r--r--spec/lib/gitlab/database/postgres_sequences_spec.rb35
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb4
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb17
-rw-r--r--spec/lib/gitlab/database/schema_migrations/context_spec.rb5
-rw-r--r--spec/lib/gitlab/database/sharding_key_spec.rb153
-rw-r--r--spec/lib/gitlab/database/transaction/observer_spec.rb2
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb4
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb27
-rw-r--r--spec/lib/gitlab/doctor/reset_tokens_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb120
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb119
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb61
-rw-r--r--spec/lib/gitlab/email/service_desk/custom_email_spec.rb37
-rw-r--r--spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb25
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb37
-rw-r--r--spec/lib/gitlab/event_store/event_spec.rb39
-rw-r--r--spec/lib/gitlab/event_store/store_spec.rb70
-rw-r--r--spec/lib/gitlab/event_store/subscription_spec.rb142
-rw-r--r--spec/lib/gitlab/exclusive_lease_spec.rb2
-rw-r--r--spec/lib/gitlab/experiment/rollout/feature_spec.rb65
-rw-r--r--spec/lib/gitlab/file_detector_spec.rb6
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb25
-rw-r--r--spec/lib/gitlab/git/compare_spec.rb110
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb95
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb31
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb95
-rw-r--r--spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb23
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb44
-rw-r--r--spec/lib/gitlab/gitaly_client/storage_settings_spec.rb14
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb39
-rw-r--r--spec/lib/gitlab/github_import/client_pool_spec.rb41
-rw-r--r--spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb3
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb24
-rw-r--r--spec/lib/gitlab/github_import/importer/events/merged_spec.rb74
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb63
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/importer/issues_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/notes_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb9
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb27
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/issuable_finder_spec.rb28
-rw-r--r--spec/lib/gitlab/github_import/job_delay_calculator_spec.rb33
-rw-r--r--spec/lib/gitlab/github_import/label_finder_spec.rb28
-rw-r--r--spec/lib/gitlab/github_import/milestone_finder_spec.rb30
-rw-r--r--spec/lib/gitlab/github_import/object_counter_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb15
-rw-r--r--spec/lib/gitlab/github_import/representation/representable_spec.rb43
-rw-r--r--spec/lib/gitlab/github_import/settings_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import_spec.rb23
-rw-r--r--spec/lib/gitlab/hook_data/project_builder_spec.rb120
-rw-r--r--spec/lib/gitlab/http_spec.rb39
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml7
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml2
-rw-r--r--spec/lib/gitlab/import_sources_spec.rb32
-rw-r--r--spec/lib/gitlab/instrumentation/connection_pool_spec.rb69
-rw-r--r--spec/lib/gitlab/instrumentation/redis_base_spec.rb14
-rw-r--r--spec/lib/gitlab/instrumentation/redis_helper_spec.rb136
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb19
-rw-r--r--spec/lib/gitlab/internal_events_spec.rb247
-rw-r--r--spec/lib/gitlab/issuables_count_for_state_spec.rb19
-rw-r--r--spec/lib/gitlab/kas/client_spec.rb14
-rw-r--r--spec/lib/gitlab/markdown_cache/redis/store_spec.rb2
-rw-r--r--spec/lib/gitlab/memory/watchdog_spec.rb13
-rw-r--r--spec/lib/gitlab/metrics/system_spec.rb363
-rw-r--r--spec/lib/gitlab/middleware/path_traversal_check_spec.rb23
-rw-r--r--spec/lib/gitlab/middleware/request_context_spec.rb2
-rw-r--r--spec/lib/gitlab/nav/top_nav_menu_header_spec.rb16
-rw-r--r--spec/lib/gitlab/omniauth_initializer_spec.rb117
-rw-r--r--spec/lib/gitlab/pages/deployment_update_spec.rb35
-rw-r--r--spec/lib/gitlab/pages/url_builder_spec.rb157
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb87
-rw-r--r--spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb9
-rw-r--r--spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb73
-rw-r--r--spec/lib/gitlab/puma/error_handler_spec.rb27
-rw-r--r--spec/lib/gitlab/quick_actions/extractor_spec.rb133
-rw-r--r--spec/lib/gitlab/redis/buffered_counter_spec.rb7
-rw-r--r--spec/lib/gitlab/redis/db_load_balancing_spec.rb36
-rw-r--r--spec/lib/gitlab/redis/sidekiq_status_spec.rb56
-rw-r--r--spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb52
-rw-r--r--spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb2
-rw-r--r--spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb18
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb66
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb37
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb18
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb36
-rw-r--r--spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb4
-rw-r--r--spec/lib/gitlab/tracking/event_definition_spec.rb27
-rw-r--r--spec/lib/gitlab/tracking_spec.rb51
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb13
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb71
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb16
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb55
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb52
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb27
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb90
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb53
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb10
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb21
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb87
-rw-r--r--spec/lib/gitlab/utils/file_info_spec.rb4
-rw-r--r--spec/lib/gitlab/web_ide/default_oauth_application_spec.rb87
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb57
-rw-r--r--spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb98
-rw-r--r--spec/lib/integrations/google_cloud_platform/jwt_spec.rb86
-rw-r--r--spec/lib/organization/current_organization_spec.rb66
-rw-r--r--spec/lib/peek/views/click_house_spec.rb6
-rw-r--r--spec/lib/product_analytics/event_params_spec.rb59
-rw-r--r--spec/lib/sbom/package_url_spec.rb2
-rw-r--r--spec/lib/sidebars/concerns/container_with_html_options_spec.rb6
-rw-r--r--spec/lib/sidebars/explore/menus/catalog_menu_spec.rb34
-rw-r--r--spec/lib/sidebars/groups/menus/scope_menu_spec.rb6
-rw-r--r--spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb1
-rw-r--r--spec/lib/sidebars/organizations/menus/manage_menu_spec.rb20
-rw-r--r--spec/lib/sidebars/projects/menus/repository_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/scope_menu_spec.rb6
-rw-r--r--spec/lib/sidebars/projects/menus/shimo_menu_spec.rb44
-rw-r--r--spec/lib/sidebars/projects/panel_spec.rb22
-rw-r--r--spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb4
-rw-r--r--spec/lib/sidebars/user_settings/menus/password_menu_spec.rb2
-rw-r--r--spec/lib/system_check/base_check_spec.rb2
-rw-r--r--spec/lib/system_check/orphans/namespace_check_spec.rb61
-rw-r--r--spec/lib/system_check/orphans/repository_check_spec.rb68
-rw-r--r--spec/lib/uploaded_file_spec.rb4
-rw-r--r--spec/lib/vite_gdk_spec.rb63
319 files changed, 9877 insertions, 5083 deletions
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb
index b74f5bf2de8..ee0a58a4e53 100644
--- a/spec/lib/api/ci/helpers/runner_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe API::Ci::Helpers::Runner do
end
end
- describe '#current_runner_manager', :freeze_time, feature_category: :runner_fleet do
+ describe '#current_runner_manager', :freeze_time, feature_category: :fleet_visibility do
let(:runner) { create(:ci_runner, token: 'foo') }
let(:runner_manager) { create(:ci_runner_machine, runner: runner, system_xid: 'bar', contacted_at: 1.hour.ago) }
diff --git a/spec/lib/api/entities/ci/job_request/image_spec.rb b/spec/lib/api/entities/ci/job_request/image_spec.rb
index 14d4a074fce..666ec31d3d9 100644
--- a/spec/lib/api/entities/ci/job_request/image_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/image_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
RSpec.describe API::Entities::Ci::JobRequest::Image do
let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }] }
- let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports, pull_policy: ['if-not-present']) }
+ let(:image) do
+ double(name: 'image_name', entrypoint: ['foo'], executor_opts: {}, ports: ports, pull_policy: ['if-not-present'])
+ end
+
let(:entity) { described_class.new(image) }
subject { entity.as_json }
@@ -29,6 +32,10 @@ RSpec.describe API::Entities::Ci::JobRequest::Image do
end
end
+ it 'returns the executor_opts options' do
+ expect(subject[:executor_opts]).to eq({})
+ end
+
it 'returns the pull policy' do
expect(subject[:pull_policy]).to eq(['if-not-present'])
end
diff --git a/spec/lib/api/entities/ci/job_request/service_spec.rb b/spec/lib/api/entities/ci/job_request/service_spec.rb
index 11350f7c41b..c2331799314 100644
--- a/spec/lib/api/entities/ci/job_request/service_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/service_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::Entities::Ci::JobRequest::Service do
::Gitlab::Ci::Build::Image,
name: 'image_name',
entrypoint: ['foo'],
+ executor_opts: {},
ports: ports,
pull_policy: ['if-not-present'],
alias: 'alias',
@@ -25,6 +26,7 @@ RSpec.describe API::Entities::Ci::JobRequest::Service do
expect(result).to eq(
name: 'image_name',
entrypoint: ['foo'],
+ executor_opts: {},
ports: ports,
pull_policy: ['if-not-present'],
alias: 'alias',
diff --git a/spec/lib/api/entities/hook_spec.rb b/spec/lib/api/entities/hook_spec.rb
new file mode 100644
index 00000000000..45648d6fb64
--- /dev/null
+++ b/spec/lib/api/entities/hook_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Hook, feature_category: :webhooks do
+ let(:hook) { create(:project_hook) }
+ let(:with_url_variables) { true }
+ let(:entity) { described_class.new(hook, with_url_variables: with_url_variables) }
+
+ subject(:json) { entity.as_json }
+
+ it 'exposes correct attributes' do
+ expect(json.keys).to contain_exactly(:alert_status, :created_at, :disabled_until, :enable_ssl_verification, :id,
+ :merge_requests_events, :push_events, :repository_update_events, :tag_push_events, :url, :url_variables
+ )
+ end
+
+ context 'when `with_url_variables` is set to false' do
+ let(:with_url_variables) { false }
+
+ it 'does not expose `with_url_variables` field' do
+ expect(json.keys).not_to include(:url_variables)
+ end
+ end
+end
diff --git a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
index 1664d9f18d2..f631a9cb803 100644
--- a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
+++ b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
- let_it_be(:candidate) { build(:ml_candidates) }
+ let_it_be(:candidate) { build_stubbed(:ml_candidates, internal_id: 1) }
- subject { described_class.new(candidate, packages_url: 'http://example.com').as_json }
+ subject { described_class.new(candidate).as_json }
context 'when start_time is nil' do
it { expect(subject[:start_time]).to eq(0) }
@@ -66,8 +66,19 @@ RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do
end
describe 'artifact_uri' do
- it 'is not implemented' do
- expect(subject[:artifact_uri]).to eq("http://example.com#{candidate.artifact_root}")
+ context 'when candidate does not belong to a model version' do
+ it 'returns the generic package (legacy) format of the artifact_uri' do
+ expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/generic#{candidate.artifact_root}")
+ end
+ end
+
+ context 'when candidate belongs to a model version' do
+ let!(:version) { create(:ml_model_versions, :with_package) }
+ let!(:candidate) { version.candidate }
+
+ it 'returns the model version format of the artifact_uri' do
+ expect(subject[:artifact_uri]).to eq("http://localhost/api/v4/projects/#{candidate.project_id}/packages/ml_models/#{version.model.name}/#{version.version}")
+ end
end
end
diff --git a/spec/lib/api/entities/project_import_status_spec.rb b/spec/lib/api/entities/project_import_status_spec.rb
index 5d7f06dc78e..8c397c30b78 100644
--- a/spec/lib/api/entities/project_import_status_spec.rb
+++ b/spec/lib/api/entities/project_import_status_spec.rb
@@ -105,6 +105,7 @@ RSpec.describe API::Entities::ProjectImportStatus, :aggregate_failures, feature_
let(:entity) { described_class.new(project) }
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :fetched, value: 10)
::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :imported, value: 8)
end
diff --git a/spec/lib/api/entities/projects/repository_storage_move_spec.rb b/spec/lib/api/entities/projects/repository_storage_move_spec.rb
index 81f5d98b713..ae6c469bb64 100644
--- a/spec/lib/api/entities/projects/repository_storage_move_spec.rb
+++ b/spec/lib/api/entities/projects/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::Projects::RepositoryStorageMove do
+RSpec.describe API::Entities::Projects::RepositoryStorageMove, feature_category: :source_code_management do
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/lib/api/entities/snippets/repository_storage_move_spec.rb b/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
index a848afbcff9..ceeae230b25 100644
--- a/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
+++ b/spec/lib/api/entities/snippets/repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::Snippets::RepositoryStorageMove do
+RSpec.describe API::Entities::Snippets::RepositoryStorageMove, feature_category: :source_code_management do
describe '#as_json' do
subject { entity.as_json }
diff --git a/spec/lib/api/helpers/import_github_helpers_spec.rb b/spec/lib/api/helpers/import_github_helpers_spec.rb
index 3324e38660c..7f8fbad1273 100644
--- a/spec/lib/api/helpers/import_github_helpers_spec.rb
+++ b/spec/lib/api/helpers/import_github_helpers_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe API::Helpers::ImportGithubHelpers, feature_category: :importers d
helper = Class.new.include(described_class).new
def helper.params = {
personal_access_token: 'foo',
- additional_access_tokens: 'bar',
github_hostname: 'github.example.com'
}
helper
@@ -21,7 +20,7 @@ RSpec.describe API::Helpers::ImportGithubHelpers, feature_category: :importers d
describe '#access_params' do
it 'makes the passed in personal access token and extra tokens accessible' do
- expect(subject.access_params).to eq({ github_access_token: 'foo', additional_access_tokens: 'bar' })
+ expect(subject.access_params).to eq({ github_access_token: 'foo' })
end
end
diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
index 757a73ed612..3e7a0187d86 100644
--- a/spec/lib/api/ml/mlflow/api_helpers_spec.rb
+++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
@@ -5,39 +5,6 @@ require 'spec_helper'
RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
include described_class
- describe '#packages_url' do
- subject { packages_url }
-
- let_it_be(:user_project) { build_stubbed(:project) }
-
- context 'with an empty relative URL root' do
- before do
- allow(Gitlab::Application.routes).to receive(:default_url_options)
- .and_return(protocol: 'http', host: 'localhost', script_name: '')
- end
-
- it { is_expected.to eql("http://localhost/api/v4/projects/#{user_project.id}/packages/generic") }
- end
-
- context 'with a forward slash relative URL root' do
- before do
- allow(Gitlab::Application.routes).to receive(:default_url_options)
- .and_return(protocol: 'http', host: 'localhost', script_name: '/')
- end
-
- it { is_expected.to eql("http://localhost/api/v4/projects/#{user_project.id}/packages/generic") }
- end
-
- context 'with a relative URL root' do
- before do
- allow(Gitlab::Application.routes).to receive(:default_url_options)
- .and_return(protocol: 'http', host: 'localhost', script_name: '/gitlab/root')
- end
-
- it { is_expected.to eql("http://localhost/gitlab/root/api/v4/projects/#{user_project.id}/packages/generic") }
- end
- end
-
describe '#candidates_order_params' do
using RSpec::Parameterized::TableSyntax
@@ -61,4 +28,47 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
end
end
end
+
+ describe '#model_order_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { model_order_params(params) }
+
+ where(:input, :order_by, :sort) do
+ '' | 'name' | 'asc'
+ 'name' | 'name' | 'asc'
+ 'name DESC' | 'name' | 'desc'
+ 'last_updated_timestamp' | 'updated_at' | 'asc'
+ 'last_updated_timestamp asc' | 'updated_at' | 'asc'
+ 'last_updated_timestamp DESC' | 'updated_at' | 'desc'
+ end
+ with_them do
+ let(:params) { { order_by: input } }
+
+ it 'is correct' do
+ is_expected.to include({ order_by: order_by, sort: sort })
+ end
+ end
+ end
+
+ describe '#model_filter_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { model_filter_params(params) }
+
+ where(:input, :output) do
+ '' | {}
+ 'name=""' | { name: '' }
+ 'name=foo' | { name: 'foo' }
+ 'name="foo"' | { name: 'foo' }
+ 'invalid="foo"' | {}
+ end
+ with_them do
+ let(:params) { { filter: input } }
+
+ it 'is correct' do
+ is_expected.to eq(output)
+ end
+ end
+ end
end
diff --git a/spec/lib/backup/database_configuration_spec.rb b/spec/lib/backup/database_configuration_spec.rb
new file mode 100644
index 00000000000..b7fa9f161c1
--- /dev/null
+++ b/spec/lib/backup/database_configuration_spec.rb
@@ -0,0 +1,239 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseConfiguration, :reestablished_active_record_base, feature_category: :backup_restore do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:connection_name) { 'main' }
+
+ subject(:config) { described_class.new(connection_name) }
+
+ describe '#initialize' do
+ it 'initializes with the provided connection_name' do
+ expect_next_instance_of(described_class) do |config|
+ expect(config.connection_name).to eq(connection_name)
+ end
+
+ config
+ end
+ end
+
+ describe '#activerecord_configuration' do
+ it 'returns a ActiveRecord::DatabaseConfigurations::HashConfig' do
+ expect(config.activerecord_configuration).to be_a ActiveRecord::DatabaseConfigurations::HashConfig
+ end
+ end
+
+ context 'with configuration override feature' do
+ let(:application_config) do
+ {
+ adapter: 'postgresql',
+ host: 'some_host',
+ port: '5432'
+ }
+ end
+
+ let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
+
+ before do
+ allow(config).to receive(:original_activerecord_config).and_return(application_config)
+ end
+
+ shared_context 'with generic database with overridden values' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_PGPORT' | '1543'
+ 'GITLAB_BACKUP_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ shared_context 'with generic database with overridden values using current database prefix' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_MAIN_PGPORT' | '1543'
+ 'GITLAB_BACKUP_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ shared_context 'with generic database with overridden values for a different database prefix' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_CI_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_CI_PGPORT' | '1543'
+ 'GITLAB_BACKUP_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ describe('#pg_env_variables') do
+ context 'with provided ENV variables' do
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ context 'when generic database configuration is overridden' do
+ include_context "with generic database with overridden values"
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(config.pg_env_variables).to include({ pg_env => overridden_value })
+ end
+ end
+ end
+
+ context 'when specific database configuration is overridden' do
+ context 'and environment variables are for the current database name' do
+ include_context 'with generic database with overridden values using current database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(config.pg_env_variables).to include({ pg_env => overridden_value })
+ end
+ end
+ end
+
+ context 'and environment variables are for another database' do
+ include_context 'with generic database with overridden values for a different database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
+
+ it 'PostgreSQL ENV is expected to equal application configuration' do
+ expect(config.pg_env_variables).to eq(
+ {
+ 'PGHOST' => application_config[:host],
+ 'PGPORT' => application_config[:port]
+ }
+ )
+ end
+ end
+ end
+ end
+ end
+
+ context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
+ it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
+ stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
+ stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specific_user')
+
+ expect(config.pg_env_variables['PGUSER']).to eq('specific_user')
+ end
+ end
+ end
+
+ describe('#activerecord_variables') do
+ context 'with provided ENV variables' do
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ context 'when generic database configuration is overridden' do
+ include_context "with generic database with overridden values"
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(config.activerecord_variables).to eq(
+ application_config.merge(active_record_key => overridden_value)
+ )
+ end
+ end
+ end
+
+ context 'when specific database configuration is overridden' do
+ context 'and environment variables are for the current database name' do
+ include_context 'with generic database with overridden values using current database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(config.activerecord_variables).to eq(
+ application_config.merge(active_record_key => overridden_value)
+ )
+ end
+ end
+ end
+
+ context 'and environment variables are for another database' do
+ include_context 'with generic database with overridden values for a different database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
+
+ it 'ActiveRecord backup configuration is expected to equal application configuration' do
+ expect(config.activerecord_variables).to eq(application_config)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
+ with_them do
+ it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
+ stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
+ stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specific_user')
+
+ expect(config.activerecord_variables[:username]).to eq('specific_user')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/backup/database_connection_spec.rb b/spec/lib/backup/database_connection_spec.rb
new file mode 100644
index 00000000000..b56da3d99f7
--- /dev/null
+++ b/spec/lib/backup/database_connection_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseConnection, :reestablished_active_record_base, feature_category: :backup_restore do
+ let(:connection_name) { 'main' }
+ let(:snapshot_id_pattern) { /[A-Z0-9]{8}-[A-Z0-9]{8}-[0-9]/ }
+
+ subject(:backup_connection) { described_class.new(connection_name) }
+
+ describe '#initialize' do
+ it 'initializes database_configuration with the provided connection_name' do
+ expect(Backup::DatabaseConfiguration).to receive(:new).with(connection_name).and_call_original
+
+ backup_connection
+ end
+ end
+
+ describe '#connection_name' do
+ it 'returns the same connection name used during initialization' do
+ expect(backup_connection.connection_name).to eq(connection_name)
+ end
+ end
+
+ describe '#connection' do
+ it 'is an instance of a ActiveRecord::Base.connection' do
+ backup_connection.connection.is_a? Gitlab::Database::LoadBalancing::ConnectionProxy
+ end
+ end
+
+ describe '#database_configuration' do
+ it 'returns database configuration' do
+ expect(backup_connection.database_configuration).to be_a(Backup::DatabaseConfiguration)
+ end
+ end
+
+ describe '#snapshot_id' do
+ it "returns nil when snapshot has not been triggered" do
+ expect(backup_connection.snapshot_id).to be_nil
+ end
+
+ context 'when a snapshot transaction is open', :delete do
+ let!(:snapshot_id) { backup_connection.export_snapshot! }
+
+ it 'returns the snapshot_id in the expected format' do
+ expect(backup_connection.snapshot_id).to match(snapshot_id_pattern)
+ end
+
+ it 'returns the snapshot_id equal to the one returned by #export_snapshot!' do
+ expect(backup_connection.snapshot_id).to eq(snapshot_id)
+ end
+
+ it "returns nil after a snapshot is released" do
+ backup_connection.release_snapshot!
+
+ expect(backup_connection.snapshot_id).to be_nil
+ end
+ end
+ end
+
+ describe '#export_snapshot!', :delete do
+ it 'returns a snapshot_id in the expected format' do
+ expect(backup_connection.export_snapshot!).to match(snapshot_id_pattern)
+ end
+
+ it 'opens a transaction with correct isolation format and triggers a snapshot generation' do
+ expect(backup_connection.connection).to receive(:begin_transaction).with(
+ isolation: :repeatable_read
+ ).and_call_original
+
+ expect(backup_connection.connection).to receive(:select_value).with(
+ "SELECT pg_export_snapshot()"
+ ).and_call_original
+
+ backup_connection.export_snapshot!
+ end
+
+ it 'disables transaction time out' do
+ expect_next_instance_of(Gitlab::Database::TransactionTimeoutSettings) do |transaction_settings|
+ expect(transaction_settings).to receive(:disable_timeouts).and_call_original
+ end
+
+ backup_connection.export_snapshot!
+ end
+ end
+
+ describe '#release_snapshot!', :delete do
+ it 'clears out existing snapshot_id' do
+ snapshot_id = backup_connection.export_snapshot!
+
+ expect { backup_connection.release_snapshot! }.to change { backup_connection.snapshot_id }
+ .from(snapshot_id).to(nil)
+ end
+
+ it 'executes a transaction rollback' do
+ backup_connection.export_snapshot!
+
+ expect(backup_connection.connection).to receive(:rollback_transaction).and_call_original
+
+ backup_connection.release_snapshot!
+ end
+ end
+end
diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb
index c9d036b37f8..9fab5cbc1c0 100644
--- a/spec/lib/backup/database_model_spec.rb
+++ b/spec/lib/backup/database_model_spec.rb
@@ -8,10 +8,10 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
let(:gitlab_database_name) { 'main' }
describe '#connection' do
- subject { described_class.new(gitlab_database_name).connection }
+ subject(:connection) { described_class.new(gitlab_database_name).connection }
it 'an instance of a ActiveRecord::Base.connection' do
- subject.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases
+ connection.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases -- We actually need an ActiveRecord::Base here
end
end
@@ -24,7 +24,7 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
}
end
- subject { described_class.new(gitlab_database_name).config }
+ subject(:config) { described_class.new(gitlab_database_name).config }
before do
allow(
@@ -34,11 +34,11 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
shared_examples 'no configuration is overridden' do
it 'ActiveRecord backup configuration is expected to equal application configuration' do
- expect(subject[:activerecord]).to eq(application_config)
+ expect(config[:activerecord]).to eq(application_config)
end
it 'PostgreSQL ENV is expected to equal application configuration' do
- expect(subject[:pg_env]).to eq(
+ expect(config[:pg_env]).to eq(
{
'PGHOST' => application_config[:host],
'PGPORT' => application_config[:port]
@@ -51,11 +51,11 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
it 'ActiveRecord backup configuration overrides application configuration' do
- expect(subject[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
+ expect(config[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
end
it 'PostgreSQL ENV overrides application configuration' do
- expect(subject[:pg_env]).to include({ pg_env => overridden_value })
+ expect(config[:pg_env]).to include({ pg_env => overridden_value })
end
end
@@ -63,7 +63,7 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
it_behaves_like 'no configuration is overridden'
end
- context 'when GITLAB_BACKUP_PG* variables are set' do
+ context 'when generic database configuration is overridden' do
where(:env_variable, :overridden_value) do
'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
'GITLAB_BACKUP_PGUSER' | 'some_user'
@@ -75,10 +75,20 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
end
with_them do
- let(:pg_env) { env_variable[/GITLAB_BACKUP_(\w+)/, 1] }
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
before do
stub_env(env_variable, overridden_value)
@@ -88,7 +98,7 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
end
end
- context 'when GITLAB_BACKUP_<DBNAME>_PG* variables are set' do
+ context 'when specific database configuration is overridden' do
context 'and environment variables are for the current database name' do
where(:env_variable, :overridden_value) do
'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
@@ -101,10 +111,20 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
end
with_them do
- let(:pg_env) { env_variable[/GITLAB_BACKUP_MAIN_(\w+)/, 1] }
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
before do
stub_env(env_variable, overridden_value)
@@ -126,10 +146,20 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
end
with_them do
- let(:pg_env) { env_variable[/GITLAB_BACKUP_CI_(\w+)/, 1] }
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
before do
stub_env(env_variable, overridden_value)
@@ -146,7 +176,6 @@ RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature
end
it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
- config = subject
expect(config.dig(:activerecord, :username)).to eq('specfic_user')
expect(config.dig(:pg_env, 'PGUSER')).to eq('specfic_user')
end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 073efbbbfcc..86468689f76 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Backup::Database, :reestablished_active_record_base, feature_category: :backup_restore do
let(:progress) { StringIO.new }
- let(:output) { progress.string }
+ let(:progress_output) { progress.string }
let(:backup_id) { 'some_id' }
let(:one_database_configured?) { base_models_for_backup.one? }
let(:timeout_service) do
@@ -48,28 +48,16 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'uses snapshots' do
Dir.mktmpdir do |dir|
- expect_next_instances_of(Backup::DatabaseModel, 2) do |adapter|
- expect(adapter.connection).to receive(:begin_transaction).with(
- isolation: :repeatable_read
- ).and_call_original
- expect(adapter.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_call_original
- expect(adapter.connection).to receive(:rollback_transaction).and_call_original
- end
+ expect_next_instances_of(Backup::DatabaseConnection, 2) do |backup_connection|
+ expect(backup_connection).to receive(:export_snapshot!).and_call_original
- subject.dump(dir, backup_id)
- end
- end
+ expect_next_instance_of(::Gitlab::Backup::Cli::Utils::PgDump) do |pgdump|
+ expect(pgdump.snapshot_id).to eq(backup_connection.snapshot_id)
+ end
- it 'disables transaction time out' do
- number_of_databases = base_models_for_backup.count
- expect(Gitlab::Database::TransactionTimeoutSettings)
- .to receive(:new).exactly(2 * number_of_databases).times.and_return(timeout_service)
- expect(timeout_service).to receive(:disable_timeouts).exactly(number_of_databases).times
- expect(timeout_service).to receive(:restore_timeouts).exactly(number_of_databases).times
+ expect(backup_connection).to receive(:release_snapshot!).and_call_original
+ end
- Dir.mktmpdir do |dir|
subject.dump(dir, backup_id)
end
end
@@ -82,79 +70,18 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'does not use snapshots' do
Dir.mktmpdir do |dir|
- base_model = Backup::DatabaseModel.new('main')
- expect(base_model.connection).not_to receive(:begin_transaction).with(
- isolation: :repeatable_read
- ).and_call_original
- expect(base_model.connection).not_to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_call_original
- expect(base_model.connection).not_to receive(:rollback_transaction).and_call_original
-
- subject.dump(dir, backup_id)
- end
- end
- end
-
- describe 'pg_dump arguments' do
- let(:snapshot_id) { 'fake_id' }
- let(:default_pg_args) do
- args = [
- '--clean',
- '--if-exists'
- ]
-
- if Gitlab::Database.database_mode == Gitlab::Database::MODE_MULTIPLE_DATABASES
- args + ["--snapshot=#{snapshot_id}"]
- else
- args
- end
- end
-
- let(:dumper) { double }
- let(:destination_dir) { 'tmp' }
-
- before do
- allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper)
- allow(dumper).to receive(:dump).with(any_args).and_return(true)
- end
+ expect_next_instance_of(Backup::DatabaseConnection) do |backup_connection|
+ expect(backup_connection).not_to receive(:export_snapshot!)
- shared_examples 'pg_dump arguments' do
- it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
- number_of_databases = base_models_for_backup.count
- if number_of_databases > 1
- expect_next_instances_of(Backup::DatabaseModel, number_of_databases) do |model|
- expect(model.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_return(snapshot_id)
+ expect_next_instance_of(::Gitlab::Backup::Cli::Utils::PgDump) do |pgdump|
+ expect(pgdump.snapshot_id).to be_nil
end
- end
-
- expect(dumper).to receive(:dump).with(anything, anything, expected_pg_args)
-
- subject.dump(destination_dir, backup_id)
- end
- end
-
- context 'when no PostgreSQL schemas are specified' do
- let(:expected_pg_args) { default_pg_args }
- include_examples 'pg_dump arguments'
- end
-
- context 'when a PostgreSQL schema is used' do
- let(:schema) { 'gitlab' }
- let(:expected_pg_args) do
- default_pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema|
- ['-n', schema.to_s]
+ expect(backup_connection).not_to receive(:release_snapshot!)
end
- end
- before do
- allow(Gitlab.config.backup).to receive(:pg_schema).and_return(schema)
+ subject.dump(dir, backup_id)
end
-
- include_examples 'pg_dump arguments'
end
end
@@ -223,7 +150,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
subject.restore(backup_dir, backup_id)
- expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
+ expect(progress_output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
end
it 'has a pre restore warning' do
@@ -241,9 +168,21 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
subject.restore(backup_dir, backup_id)
- expect(output).to include("Restoring PostgreSQL database")
- expect(output).to include("[DONE]")
- expect(output).not_to include("ERRORS")
+ expect(progress_output).to include("Restoring PostgreSQL database")
+ expect(progress_output).to include("[DONE]")
+ expect(progress_output).not_to include("ERRORS")
+ end
+
+ context 'when DECOMPRESS_CMD is set to tee' do
+ before do
+ stub_env('DECOMPRESS_CMD', 'tee')
+ end
+
+ it 'outputs a message about DECOMPRESS_CMD' do
+ expect do
+ subject.restore(backup_dir, backup_id)
+ end.to output(/Using custom DECOMPRESS_CMD 'tee'/).to_stdout
+ end
end
end
@@ -277,9 +216,9 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
subject.restore(backup_dir, backup_id)
- expect(output).to include("ERRORS")
- expect(output).not_to include(noise)
- expect(output).to include(visible_error)
+ expect(progress_output).to include("ERRORS")
+ expect(progress_output).not_to include(noise)
+ expect(progress_output).to include(visible_error)
expect(subject.post_restore_warning).not_to be_nil
end
end
diff --git a/spec/lib/backup/dump/postgres_spec.rb b/spec/lib/backup/dump/postgres_spec.rb
index f6a68ab6db9..1da2ee950db 100644
--- a/spec/lib/backup/dump/postgres_spec.rb
+++ b/spec/lib/backup/dump/postgres_spec.rb
@@ -3,17 +3,22 @@
require 'spec_helper'
RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do
- describe '#dump' do
- let(:pg_database) { 'gitlabhq_test' }
- let(:destination_dir) { Dir.mktmpdir }
- let(:db_file_name) { File.join(destination_dir, 'output.gz') }
+ let(:pg_database) { 'gitlabhq_test' }
+ let(:pg_dump) { ::Gitlab::Backup::Cli::Utils::PgDump.new(database_name: pg_database) }
+ let(:default_compression_cmd) { 'gzip -c -1' }
- let(:pipes) { IO.pipe }
- let(:gzip_pid) { spawn('gzip -c -1', in: pipes[0], out: [db_file_name, 'w', 0o600]) }
- let(:pg_dump_pid) { Process.spawn('pg_dump', *args, pg_database, out: pipes[1]) }
- let(:args) { ['--help'] }
+ subject(:postgres) { described_class.new }
- subject { described_class.new }
+ describe '#compress_cmd' do
+ it 'returns default compression command' do
+ expect(postgres.compress_cmd).to eq(default_compression_cmd)
+ end
+ end
+
+ describe '#dump' do
+ let(:pipes) { IO.pipe }
+ let(:destination_dir) { Dir.mktmpdir }
+ let(:dump_file_name) { File.join(destination_dir, 'output.gz') }
before do
allow(IO).to receive(:pipe).and_return(pipes)
@@ -23,14 +28,55 @@ RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do
FileUtils.remove_entry destination_dir
end
- it 'creates gzipped dump using supplied arguments' do
- expect(subject).to receive(:spawn).with('gzip -c -1', in: pipes.first,
- out: [db_file_name, 'w', 0o600]).and_return(gzip_pid)
- expect(Process).to receive(:spawn).with('pg_dump', *args, pg_database, out: pipes[1]).and_return(pg_dump_pid)
+ context 'with default compression method' do
+ it 'creates a dump file' do
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+
+ it 'default compression command is used' do
+ compressor_pid = spawn(default_compression_cmd, in: pipes[0], out: [dump_file_name, 'w', 0o600])
+
+ expect(postgres).to receive(:spawn).with(
+ default_compression_cmd,
+ in: pipes.first,
+ out: [dump_file_name, 'w', 0o600]).and_return(compressor_pid)
+
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+ end
+
+ context 'when COMPRESS_CMD is set to tee' do
+ let(:tee_pid) { spawn('tee', in: pipes[0], out: [dump_file_name, 'w', 0o600]) }
+
+ before do
+ stub_env('COMPRESS_CMD', 'tee')
+ end
+
+ it 'creates a dump file' do
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+
+ it 'passes through tee instead of gzip' do
+ custom_compression_command = 'tee'
+ compressor_pid = spawn(custom_compression_command, in: pipes[0], out: [dump_file_name, 'w', 0o600])
+
+ expect(postgres).to receive(:spawn).with(
+ custom_compression_command,
+ in: pipes.first,
+ out: [dump_file_name, 'w', 0o600]).and_return(compressor_pid)
- subject.dump(pg_database, db_file_name, args)
+ expect do
+ postgres.dump(dump_file_name, pg_dump)
+ end.to output(/Using custom COMPRESS_CMD 'tee'/).to_stdout
- expect(File.exist?(db_file_name)).to eq(true)
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
end
end
end
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index 48c89e06dfa..f0fc829764a 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'calls tar command with unlink' do
expect(subject).to receive(:tar).and_return('blabla-tar')
- expect(subject).to receive(:run_pipeline!).with([%w[gzip -cd], %w[blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -]], any_args)
+ expect(subject).to receive(:run_pipeline!).with(["gzip -cd", %w[blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -]], any_args)
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
subject.restore('registry.tar.gz', 'backup_id')
end
@@ -107,6 +107,21 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
expect { subject.restore('registry.tar.gz', 'backup_id') }.to raise_error(/is a mountpoint/)
end
end
+
+ describe 'with DECOMPRESS_CMD' do
+ before do
+ stub_env('DECOMPRESS_CMD', 'tee')
+ allow(subject).to receive(:pipeline_succeeded?).and_return(true)
+ end
+
+ it 'passes through tee instead of gzip' do
+ expect(subject).to receive(:run_pipeline!).with(['tee', anything], any_args).and_return([[true, true], ''])
+
+ expect do
+ subject.restore('registry.tar.gz', 'backup_id')
+ end.to output(/Using custom DECOMPRESS_CMD 'tee'/).to_stdout
+ end
+ end
end
describe '#dump' do
@@ -173,6 +188,37 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
.and raise_error(/Failed to create compressed file/)
end
end
+
+ describe 'with COMPRESS_CMD' do
+ before do
+ stub_env('COMPRESS_CMD', 'tee')
+ end
+
+ it 'passes through tee instead of gzip' do
+ expect(subject).to receive(:run_pipeline!).with([anything, 'tee'], any_args)
+ expect do
+ subject.dump('registry.tar.gz', 'backup_id')
+ end.to output(/Using custom COMPRESS_CMD 'tee'/).to_stdout
+ end
+ end
+
+ context 'when GZIP_RSYNCABLE is "yes"' do
+ before do
+ stub_env('GZIP_RSYNCABLE', 'yes')
+ end
+
+ it 'gzips the files with rsyncable option' do
+ expect(subject).to receive(:run_pipeline!).with([anything, 'gzip --rsyncable -c -1'], any_args)
+ subject.dump('registry.tar.gz', 'backup_id')
+ end
+ end
+
+ context 'when GZIP_RSYNCABLE is not set' do
+ it 'gzips the files without the rsyncable option' do
+ expect(subject).to receive(:run_pipeline!).with([anything, 'gzip -c -1'], any_args)
+ subject.dump('registry.tar.gz', 'backup_id')
+ end
+ end
end
describe '#exclude_dirs' do
@@ -226,13 +272,13 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'returns true if both tar and gzip succeeeded' do
expect(
- subject.pipeline_succeeded?(tar_status: status_0, gzip_status: status_0, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_0, compress_status: status_0, output: 'any_output')
).to be_truthy
end
it 'returns false if gzip failed' do
expect(
- subject.pipeline_succeeded?(tar_status: status_1, gzip_status: status_1, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_1, output: 'any_output')
).to be_falsey
end
@@ -243,7 +289,7 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'returns true' do
expect(
- subject.pipeline_succeeded?(tar_status: status_1, gzip_status: status_0, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
).to be_truthy
end
end
@@ -255,7 +301,7 @@ RSpec.describe Backup::Files, feature_category: :backup_restore do
it 'returns false' do
expect(
- subject.pipeline_succeeded?(tar_status: status_1, gzip_status: status_0, output: 'any_output')
+ subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
).to be_falsey
end
end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index ad5fb8ea84e..024f6c5db96 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -90,10 +90,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
let_it_be(:project) { create(:project_with_design, :repository) }
before do
- stub_storage_settings('test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings('test_second_storage' => {})
end
it 'calls enqueue for all repositories on the specified storage', :aggregate_failures do
@@ -249,44 +246,11 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
end
- context 'cleanup snippets' do
- before do
- error_response = ServiceResponse.error(message: "Repository has more than one branch")
- allow(Snippets::RepositoryValidationService).to receive_message_chain(:new, :execute).and_return(error_response)
- end
-
- it 'shows the appropriate error' do
- subject.restore(destination, backup_id)
-
- expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
- expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
- end
-
- it 'removes the snippets from the DB' do
- expect { subject.restore(destination, backup_id) }.to change(PersonalSnippet, :count).by(-1)
- .and change(ProjectSnippet, :count).by(-1)
- .and change(SnippetRepository, :count).by(-2)
- end
-
- it 'removes the repository from disk' do
- gitlab_shell = Gitlab::Shell.new
- shard_name = personal_snippet.repository.shard
- path = personal_snippet.disk_path + '.git'
-
- subject.restore(destination, backup_id)
-
- expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
- end
- end
-
context 'storages' do
let(:storages) { %w[default] }
before do
- stub_storage_settings('test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings('test_second_storage' => {})
end
it 'calls enqueue for all repositories on the specified storage', :aggregate_failures do
diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
index 7fd25eac81b..4fc9d9dd4f6 100644
--- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
+++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
@@ -55,4 +55,12 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter, feature_category: :team_planni
filter('<p>:tanuki:</p> <p>:party-parrot:</p>')
end.not_to exceed_all_query_limit(control_count.count)
end
+
+ it 'uses custom emoji from ancestor group' do
+ subgroup = create(:group, parent: group)
+
+ doc = filter('<p>:tanuki:</p>', group: subgroup)
+
+ expect(doc.css('gl-emoji').size).to eq 1
+ end
end
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
index 251e6efe50b..b4fb715b8f0 100644
--- a/spec/lib/banzai/filter/markdown_filter_spec.rb
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
include FilterSpecHelper
describe 'markdown engine from context' do
@@ -22,6 +23,21 @@ RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning
end
end
+ describe 'parse_sourcepos' do
+ where(:sourcepos, :expected) do
+ '1:1-1:4' | { start: { row: 0, col: 0 }, end: { row: 0, col: 3 } }
+ '12:22-1:456' | { start: { row: 11, col: 21 }, end: { row: 0, col: 455 } }
+ '0:0-0:0' | { start: { row: 0, col: 0 }, end: { row: 0, col: 0 } }
+ '-1:2-3:-4' | nil
+ end
+
+ with_them do
+ it 'correctly parses' do
+ expect(described_class.parse_sourcepos(sourcepos)).to eq expected
+ end
+ end
+ end
+
describe 'code block' do
it 'adds language to lang attribute when specified' do
result = filter("```html\nsome code\n```", no_sourcepos: true)
diff --git a/spec/lib/banzai/filter/quick_action_filter_spec.rb b/spec/lib/banzai/filter/quick_action_filter_spec.rb
new file mode 100644
index 00000000000..a2a300d157c
--- /dev/null
+++ b/spec/lib/banzai/filter/quick_action_filter_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::QuickActionFilter, feature_category: :team_planning do
+ let(:result) { {} }
+
+ it 'detects action in paragraph' do
+ described_class.call('<p data-sourcepos="1:1-2:3">/quick</p>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to match_array [{ start_line: 0, end_line: 1 }]
+ end
+
+ it 'does not detect action in paragraph if no sourcepos' do
+ described_class.call('<p>/quick</p>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does not detect action in blockquote' do
+ described_class.call('<blockquote data-sourcepos="1:1-1:1">/quick</blockquote>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does not detect action in html block' do
+ described_class.call('<li data-sourcepos="1:1-1:1">/quick</li>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does not detect action in code block' do
+ described_class.call('<code data-sourcepos="1:1-1:1">/quick</code>', {}, result)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+end
diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
index 9723e9b39f1..9a2e68aaae0 100644
--- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
@@ -240,9 +240,15 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
# Since we're not batching alert queries across projects,
# we have to account for that.
- # 1 for both projects, 1 for alerts in each project == 3
+ # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1x2 for alerts in each project
+ # Total == 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count += 2
+ max_count += 6
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
index 6e0f9eda0e2..35a3f20f7b7 100644
--- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
@@ -287,12 +287,18 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
reference_filter(markdown)
end.count
- markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
+ expect(max_count).to eq 0
+
+ markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
# Commits are not DB entries, they are on the project itself.
- # So adding commits from two more projects to the markdown should
- # only increase by 1 query
- max_count += 1
+    # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # Total = 5
+ max_count += 5
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index a4587b70dfa..81b08a4c516 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -747,10 +747,16 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
# Since we're not batching label queries across projects/groups,
# queries increase when a new project/group is added.
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- # first reference to already loaded project (1),
- # second reference requires project and namespace (2), and label (1)
+    # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1 for the group
+ # 1x2 for labels
+ # Total == 8
markdown = "#{project_reference} #{group2_reference}"
- max_count = control_count + 3
+ max_count = control_count + 7
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index 1fa62d70b72..e778f07227c 100644
--- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -522,7 +522,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_cat
# queries increase when a new project/group is added.
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
markdown = "#{project_reference} #{group2_reference}"
- control_count += 5
+ control_count += 9
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index 9433862ac8a..c55fff78756 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ
reference_filter(markdown)
end.count
- expect(max_count).to eq 1
+ expect(max_count).to eq 2
markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} #{group_project_reference} #{nested_project_reference}"
diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb
index 577e4471433..04877931610 100644
--- a/spec/lib/banzai/filter/references/reference_cache_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb
@@ -79,8 +79,16 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
expect(control_count).to eq 3
# Since this is an issue filter that is not batching issue queries
# across projects, we have to account for that.
- # 1 for original issue, 2 for second route/project, 1 for other issue
- max_count = control_count + 4
+    # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1x2 for issues
+ # 1x2 for groups
+ # 1x2 for work_item_types
+ # Total = 11
+ max_count = control_count + 8
expect do
cache.load_references_per_parent(filter.nodes)
diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
index b196d85ba8a..00eac7262f4 100644
--- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
@@ -239,9 +239,15 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
# Since we're not batching snippet queries across projects,
# we have to account for that.
- # 1 for both projects, 1 for snippets in each project == 3
+    # 1 for routes to find routes.source_id of projects matching paths
+ # 1 for projects belonging to the above routes
+ # 1 for preloading routes of the projects
+ # 1 for loading the namespaces associated to the project
+ # 1 for loading the routes associated with the namespace
+ # 1x2 for snippets in each project == 2
+ # Total = 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count = control_count + 2
+ max_count = control_count + 6
expect do
reference_filter(markdown)
diff --git a/spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb b/spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb
new file mode 100644
index 00000000000..cce69b9baf0
--- /dev/null
+++ b/spec/lib/banzai/pipeline/quick_action_pipeline_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Pipeline::QuickActionPipeline, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
+
+ it 'does not detect a quick action' do
+ markdown = <<~MD.strip
+ <!-- HTML comment -->
+ A paragraph
+
+ > a blockquote
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+
+ it 'does detect a quick action' do
+ markdown = <<~MD.strip
+ <!-- HTML comment -->
+ /quick
+
+ > a blockquote
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs]).to match_array [{ start_line: 1, end_line: 1 }]
+ end
+
+  it 'does detect multiple quick actions but not in a multi-line blockquote' do
+ markdown = <<~MD.strip
+ Lorem ipsum
+ /quick
+ /action
+
+ >>>
+ /quick
+ >>>
+
+ /action
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs])
+ .to match_array [{ start_line: 0, end_line: 2 }, { start_line: 8, end_line: 8 }]
+ end
+
+  it 'does not detect a quick action in a code block' do
+ markdown = <<~MD.strip
+ ```
+ Lorem ipsum
+ /quick
+ /action
+ ```
+ MD
+ result = described_class.call(markdown, project: nil)
+
+ expect(result[:quick_action_paragraphs]).to be_empty
+ end
+end
diff --git a/spec/lib/bitbucket/connection_spec.rb b/spec/lib/bitbucket/connection_spec.rb
index 2b35a37558c..6cf010f2eed 100644
--- a/spec/lib/bitbucket/connection_spec.rb
+++ b/spec/lib/bitbucket/connection_spec.rb
@@ -19,6 +19,10 @@ RSpec.describe Bitbucket::Connection, feature_category: :integrations do
token_url: OmniAuth::Strategies::Bitbucket.default_options[:client_options]['token_url']
}
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:retry_with_exponential_backoff).and_call_original
+ end
+
expect(OAuth2::Client)
.to receive(:new)
.with(anything, anything, expected_client_options)
@@ -31,6 +35,47 @@ RSpec.describe Bitbucket::Connection, feature_category: :integrations do
connection.get('/users')
end
+
+ context 'when the API returns an error' do
+ before do
+ allow_next_instance_of(OAuth2::AccessToken) do |instance|
+ allow(instance).to receive(:get).and_raise(OAuth2::Error, 'some error')
+ end
+
+ stub_const('Bitbucket::ExponentialBackoff::INITIAL_DELAY', 0.0)
+ allow(Random).to receive(:rand).and_return(0.001)
+ end
+
+ it 'logs the retries and raises an error if it does not succeed on retry' do
+ expect(Gitlab::BitbucketImport::Logger).to receive(:info)
+ .with(message: 'Retrying in 0.0 seconds due to some error')
+ .twice
+
+ connection = described_class.new({ token: token })
+
+ expect { connection.get('/users') }.to raise_error(Bitbucket::ExponentialBackoff::RateLimitError)
+ end
+ end
+
+ context 'when the bitbucket_importer_exponential_backoff feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_importer_exponential_backoff: false)
+ end
+
+ it 'does not run with exponential backoff' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).not_to receive(:retry_with_exponential_backoff).and_call_original
+ end
+
+ expect_next_instance_of(OAuth2::AccessToken) do |instance|
+ expect(instance).to receive(:get).and_return(double(parsed: true))
+ end
+
+ connection = described_class.new({ token: token })
+
+ connection.get('/users')
+ end
+ end
end
describe '#expired?' do
diff --git a/spec/lib/bitbucket/exponential_backoff_spec.rb b/spec/lib/bitbucket/exponential_backoff_spec.rb
new file mode 100644
index 00000000000..b52a83731f4
--- /dev/null
+++ b/spec/lib/bitbucket/exponential_backoff_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Bitbucket::ExponentialBackoff, feature_category: :importers do
+ let(:service) { dummy_class.new }
+ let(:body) { 'test' }
+ let(:parsed_response) { instance_double(Net::HTTPResponse, body: body.to_json) }
+ let(:response) { double(Faraday::Response, body: body, parsed: parsed_response) }
+ let(:response_caller) { -> { response } }
+
+ let(:dummy_class) do
+ Class.new do
+ def logger
+ @logger ||= Logger.new(File::NULL)
+ end
+
+ def dummy_method(response_caller)
+ retry_with_exponential_backoff do
+ response_caller.call
+ end
+ end
+
+ include Bitbucket::ExponentialBackoff
+ end
+ end
+
+ subject(:execute) { service.dummy_method(response_caller) }
+
+ describe '.retry_with_exponential_backoff' do
+ let(:max_retries) { described_class::MAX_RETRIES }
+
+ context 'when the function succeeds on the first try' do
+ it 'calls the function once and returns its result' do
+ expect(response_caller).to receive(:call).once.and_call_original
+
+ expect(Gitlab::Json.parse(execute.parsed.body)).to eq(body)
+ end
+ end
+
+ context 'when the function response is an error' do
+ let(:error) { 'Rate limit for this resource has been exceeded' }
+
+ before do
+ stub_const("#{described_class.name}::INITIAL_DELAY", 0.0)
+ allow(Random).to receive(:rand).and_return(0.001)
+ end
+
+ it 'raises a RateLimitError if the maximum number of retries is exceeded' do
+ allow(response_caller).to receive(:call).and_raise(OAuth2::Error, error)
+
+ message = "Maximum number of retries (#{max_retries}) exceeded. #{error}"
+
+ expect do
+ execute
+ end.to raise_error(described_class::RateLimitError, message)
+
+ expect(response_caller).to have_received(:call).exactly(max_retries).times
+ end
+ end
+ end
+end
diff --git a/spec/lib/bitbucket/representation/pull_request_spec.rb b/spec/lib/bitbucket/representation/pull_request_spec.rb
index 9ebf59ecf82..9aeeef320ed 100644
--- a/spec/lib/bitbucket/representation/pull_request_spec.rb
+++ b/spec/lib/bitbucket/representation/pull_request_spec.rb
@@ -74,11 +74,13 @@ RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :import
'title' => 'title',
'source' => {
'branch' => { 'name' => 'source-branch-name' },
- 'commit' => { 'hash' => 'source-commit-hash' }
+ 'commit' => { 'hash' => 'source-commit-hash' },
+ 'repository' => { 'uuid' => 'uuid' }
},
'destination' => {
'branch' => { 'name' => 'destination-branch-name' },
- 'commit' => { 'hash' => 'destination-commit-hash' }
+ 'commit' => { 'hash' => 'destination-commit-hash' },
+ 'repository' => { 'uuid' => 'uuid' }
},
'merge_commit' => { 'hash' => 'merge-commit-hash' },
'reviewers' => [
@@ -101,6 +103,7 @@ RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :import
target_branch_sha: 'destination-commit-hash',
title: 'title',
updated_at: 'updated-at',
+ source_and_target_project_different: false,
reviewers: ['user-2']
}
diff --git a/spec/lib/bitbucket_server/client_spec.rb b/spec/lib/bitbucket_server/client_spec.rb
index cd3179f19d4..0d027234a0d 100644
--- a/spec/lib/bitbucket_server/client_spec.rb
+++ b/spec/lib/bitbucket_server/client_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BitbucketServer::Client do
+RSpec.describe BitbucketServer::Client, feature_category: :importers do
let(:base_uri) { 'https://test:7990/stash/' }
let(:options) { { base_uri: base_uri, user: 'bitbucket', password: 'mypassword' } }
let(:project) { 'SOME-PROJECT' }
@@ -80,6 +80,25 @@ RSpec.describe BitbucketServer::Client do
end
end
+ describe '#users' do
+ let(:path) { "/projects/#{project}/permissions/users" }
+
+ it 'requests a collection' do
+ expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :user, page_offset: 0, limit: nil)
+
+ subject.users(project)
+ end
+
+ it 'requests a collection with offset and limit' do
+ offset = 10
+ limit = 100
+
+ expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :user, page_offset: offset, limit: limit)
+
+ subject.users(project, page_offset: offset, limit: limit)
+ end
+ end
+
describe '#create_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
diff --git a/spec/lib/bitbucket_server/connection_spec.rb b/spec/lib/bitbucket_server/connection_spec.rb
index 8341ca10f43..59eda91285f 100644
--- a/spec/lib/bitbucket_server/connection_spec.rb
+++ b/spec/lib/bitbucket_server/connection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BitbucketServer::Connection do
+RSpec.describe BitbucketServer::Connection, feature_category: :importers do
let(:options) { { base_uri: 'https://test:7990', user: 'bitbucket', password: 'mypassword' } }
let(:payload) { { 'test' => 1 } }
let(:headers) { { "Content-Type" => "application/json" } }
@@ -11,83 +11,162 @@ RSpec.describe BitbucketServer::Connection do
subject { described_class.new(options) }
describe '#get' do
- it 'returns JSON body' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 200, headers: headers)
+ before do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' })
+ .to_return(body: payload.to_json, status: 200, headers: headers)
+ end
+
+ it 'runs with retry_with_delay' do
+ expect(subject).to receive(:retry_with_delay).and_call_original.once
- expect(subject.get(url, { something: 1 })).to eq(payload)
+ subject.get(url)
end
- it 'throws an exception if the response is not 200' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
+ shared_examples 'handles get requests' do
+ it 'returns JSON body' do
+ expect(subject.get(url, { something: 1 })).to eq(payload)
+ end
+
+ it 'throws an exception if the response is not 200' do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
+
+ expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception if the response is not JSON' do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
- expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception upon a network error' do
+ WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+
+ expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ end
end
- it 'throws an exception if the response is not JSON' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
+ it_behaves_like 'handles get requests'
+
+ context 'when the response is a 429 rate limit reached error' do
+ let(:response) do
+ instance_double(HTTParty::Response, parsed_response: payload, code: 429, headers: headers.merge('retry-after' => '0'))
+ end
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_return(response)
+ end
- expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ it 'sleeps, retries and if the error persists it fails' do
+ expect(Gitlab::BitbucketServerImport::Logger).to receive(:info)
+ .with(message: 'Retrying in 0 seconds due to 429 Too Many Requests')
+ .once
+
+ expect { subject.get(url) }.to raise_error(BitbucketServer::Connection::ConnectionError)
+ end
end
- it 'throws an exception upon a network error' do
- WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+ context 'when the bitbucket_server_importer_exponential_backoff feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
+ end
- expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
+ it_behaves_like 'handles get requests'
end
end
describe '#post' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
- it 'returns JSON body' do
+ before do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
-
- expect(subject.post(url, payload)).to eq(payload)
end
- it 'throws an exception if the response is not 200' do
- WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
+ it 'runs with retry_with_delay' do
+ expect(subject).to receive(:retry_with_delay).and_call_original.once
- expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ subject.post(url, payload)
end
- it 'throws an exception upon a network error' do
- WebMock.stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+ shared_examples 'handles post requests' do
+ it 'returns JSON body' do
+ expect(subject.post(url, payload)).to eq(payload)
+ end
+
+ it 'throws an exception if the response is not 200' do
+ WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
- expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception upon a network error' do
+ WebMock.stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(OpenSSL::SSL::SSLError)
+
+ expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception if the URI is invalid' do
+ stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(URI::InvalidURIError)
+
+ expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ end
end
- it 'throws an exception if the URI is invalid' do
- stub_request(:post, url).with(headers: { 'Accept' => 'application/json' }).to_raise(URI::InvalidURIError)
+ it_behaves_like 'handles post requests'
- expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
+ context 'when the bitbucket_server_importer_exponential_backoff feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
+ end
+
+ it_behaves_like 'handles post requests'
end
end
describe '#delete' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
+ before do
+ WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
+ end
+
context 'branch API' do
let(:branch_path) { '/projects/foo/repos/bar/branches' }
let(:branch_url) { 'https://test:7990/rest/branch-utils/1.0/projects/foo/repos/bar/branches' }
let(:path) {}
- it 'returns JSON body' do
- WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
+ it 'runs with retry_with_delay' do
+ expect(subject).to receive(:retry_with_delay).and_call_original.once
- expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
+ subject.delete(:branches, branch_path, payload)
end
- it 'throws an exception if the response is not 200' do
- WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
+ shared_examples 'handles delete requests' do
+ it 'returns JSON body' do
+ expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
+ end
- expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ it 'throws an exception if the response is not 200' do
+ WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
+
+ expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ end
+
+ it 'throws an exception upon a network error' do
+ WebMock.stub_request(:delete, branch_url).with(headers: headers).to_raise(OpenSSL::SSL::SSLError)
+
+ expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ end
end
- it 'throws an exception upon a network error' do
- WebMock.stub_request(:delete, branch_url).with(headers: headers).to_raise(OpenSSL::SSL::SSLError)
+ it_behaves_like 'handles delete requests'
+
+ context 'with the bitbucket_server_importer_exponential_backoff feature flag disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_importer_exponential_backoff: false)
+ end
- expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
+ it_behaves_like 'handles delete requests'
end
end
end
diff --git a/spec/lib/bitbucket_server/representation/activity_spec.rb b/spec/lib/bitbucket_server/representation/activity_spec.rb
index 0b7e4542cbe..ee0486ab210 100644
--- a/spec/lib/bitbucket_server/representation/activity_spec.rb
+++ b/spec/lib/bitbucket_server/representation/activity_spec.rb
@@ -2,11 +2,12 @@
require 'spec_helper'
-RSpec.describe BitbucketServer::Representation::Activity do
+RSpec.describe BitbucketServer::Representation::Activity, feature_category: :importers do
let(:activities) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:inline_comment) { activities.first }
let(:comment) { activities[3] }
let(:merge_event) { activities[4] }
+ let(:approved_event) { activities[8] }
describe 'regular comment' do
subject { described_class.new(comment) }
@@ -37,4 +38,17 @@ RSpec.describe BitbucketServer::Representation::Activity do
it { expect(subject.created_at).to be_a(Time) }
it { expect(subject.merge_commit).to eq('839fa9a2d434eb697815b8fcafaecc51accfdbbc') }
end
+
+ describe 'approved event' do
+ subject { described_class.new(approved_event) }
+
+ it { expect(subject.id).to eq(15) }
+ it { expect(subject.comment?).to be_falsey }
+ it { expect(subject.inline_comment?).to be_falsey }
+ it { expect(subject.merge_event?).to be_falsey }
+ it { expect(subject.approved_event?).to be_truthy }
+ it { expect(subject.approver_username).to eq('slug') }
+ it { expect(subject.approver_email).to eq('test.user@example.com') }
+ it { expect(subject.created_at).to be_a(Time) }
+ end
end
diff --git a/spec/lib/bitbucket_server/representation/user_spec.rb b/spec/lib/bitbucket_server/representation/user_spec.rb
new file mode 100644
index 00000000000..32470e3a12f
--- /dev/null
+++ b/spec/lib/bitbucket_server/representation/user_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BitbucketServer::Representation::User, feature_category: :importers do
+ let(:email) { 'test@email.com' }
+ let(:username) { 'test_user' }
+ let(:sample_data) { { 'user' => { 'emailAddress' => email, 'slug' => username } } }
+
+ subject(:user) { described_class.new(sample_data) }
+
+ describe '#email' do
+ it { expect(user.email).to eq(email) }
+ end
+
+ describe '#username' do
+ it { expect(user.username).to eq(username) }
+ end
+end
diff --git a/spec/lib/bitbucket_server/retry_with_delay_spec.rb b/spec/lib/bitbucket_server/retry_with_delay_spec.rb
new file mode 100644
index 00000000000..99685b08299
--- /dev/null
+++ b/spec/lib/bitbucket_server/retry_with_delay_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BitbucketServer::RetryWithDelay, feature_category: :importers do
+ let(:service) { dummy_class.new }
+ let(:body) { 'test' }
+ let(:response) { instance_double(HTTParty::Response, body: body, code: 200) }
+ let(:response_caller) { -> { response } }
+
+ let(:dummy_class) do
+ Class.new do
+ def logger
+ @logger ||= Logger.new(File::NULL)
+ end
+
+ def dummy_method(response_caller)
+ retry_with_delay do
+ response_caller.call
+ end
+ end
+
+ include BitbucketServer::RetryWithDelay
+ end
+ end
+
+ subject(:execute) { service.dummy_method(response_caller) }
+
+ describe '.retry_with_delay' do
+ context 'when the function succeeds on the first try' do
+ it 'calls the function once and returns its result' do
+ expect(response_caller).to receive(:call).once.and_call_original
+
+ execute
+ end
+ end
+
+ context 'when the request has a status code of 429' do
+ let(:headers) { { 'retry-after' => '0' } }
+ let(:body) { 'HTTP Status 429 - Too Many Requests' }
+ let(:response) { instance_double(HTTParty::Response, body: body, code: 429, headers: headers) }
+
+ before do
+ stub_const("#{described_class}::MAXIMUM_DELAY", 0)
+ end
+
+ it 'calls the function again after a delay' do
+ expect(response_caller).to receive(:call).twice.and_call_original
+
+ expect_next_instance_of(Logger) do |logger|
+ expect(logger).to receive(:info)
+ .with(message: 'Retrying in 0 seconds due to 429 Too Many Requests')
+ .once
+ end
+
+ execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 08d0509b54f..2eceefe3091 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -250,9 +250,9 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
.to_return(status: 401, body: "", headers: { 'Content-Type' => 'application/json' })
- expect { subject.instance_version }.to raise_exception(BulkImports::Error,
- "Personal access token does not have the required 'api' scope or " \
- "is no longer valid.")
+ expect { subject.instance_version }
+ .to raise_exception(BulkImports::Error,
+ "Check that the source instance base URL and the personal access token meet the necessary requirements.")
end
end
@@ -262,9 +262,9 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
.to_return(status: 403, body: "", headers: { 'Content-Type' => 'application/json' })
- expect { subject.instance_version }.to raise_exception(BulkImports::Error,
- "Personal access token does not have the required 'api' scope or " \
- "is no longer valid.")
+ expect { subject.instance_version }
+ .to raise_exception(BulkImports::Error,
+ "Check that the source instance base URL and the personal access token meet the necessary requirements.")
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
index b96ea20c676..e1ad9c75dcb 100644
--- a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
@@ -10,16 +10,13 @@ RSpec.describe BulkImports::Common::Pipelines::EntityFinisher, feature_category:
subject = described_class.new(context)
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(entity).and_call_original
+
expect(logger)
.to receive(:info)
.with(
- bulk_import_id: entity.bulk_import_id,
- bulk_import_entity_id: entity.id,
- bulk_import_entity_type: entity.source_type,
- source_full_path: entity.source_full_path,
pipeline_class: described_class.name,
- message: 'Entity finished',
- source_version: entity.bulk_import.source_version_info.to_s
+ message: 'Entity finished'
)
end
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
index 5ba9bd08009..5662c4d7bdc 100644
--- a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -192,7 +192,7 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline, feature_categ
allow(object).to receive(:persisted?).and_return(false)
end
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger)
.to receive(:warn)
.with(project_id: portable.id,
diff --git a/spec/lib/bulk_imports/logger_spec.rb b/spec/lib/bulk_imports/logger_spec.rb
new file mode 100644
index 00000000000..889e5573c66
--- /dev/null
+++ b/spec/lib/bulk_imports/logger_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Logger, feature_category: :importers do
+ describe '#with_entity' do
+ subject(:logger) { described_class.new('/dev/null').with_entity(entity) }
+
+ let(:entity) { build(:bulk_import_entity) }
+
+ it 'records the entity information' do
+ output = logger.format_message('INFO', Time.zone.now, 'test', 'Hello world')
+ data = Gitlab::Json.parse(output)
+
+ expect(data).to include(
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'source_full_path' => entity.source_full_path,
+ 'source_version' => entity.bulk_import.source_version_info.to_s
+ )
+ end
+ end
+
+ describe '#with_tracker' do
+ subject(:logger) { described_class.new('/dev/null').with_tracker(tracker) }
+
+ let_it_be(:tracker) { build(:bulk_import_tracker) }
+
+ it 'records the tracker information' do
+ output = logger.format_message('INFO', Time.zone.now, 'test', 'Hello world')
+ data = Gitlab::Json.parse(output)
+
+ expect(data).to include(
+ 'tracker_id' => tracker.id,
+ 'pipeline_class' => tracker.pipeline_name,
+ 'tracker_state' => tracker.human_status_name
+ )
+ end
+
+ it 'also loads the entity data' do
+ expect_next_instance_of(described_class) do |logger|
+ expect(logger).to receive(:with_entity).once
+ end
+
+ logger
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/network_error_spec.rb b/spec/lib/bulk_imports/network_error_spec.rb
index d5e2b739c8f..7a7250416f7 100644
--- a/spec/lib/bulk_imports/network_error_spec.rb
+++ b/spec/lib/bulk_imports/network_error_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache, feature_category: :importers do
let(:tracker) { double(id: 1, stage: 2, entity: double(id: 3)) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
describe '.new' do
it 'requires either a message or a HTTP response' do
expect { described_class.new }
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 4540408990c..5482068204d 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -55,13 +55,11 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
shared_examples 'failed pipeline' do |exception_class, exception_message|
it 'logs import failure' do
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:error)
.with(
a_hash_including(
- 'bulk_import_entity_id' => entity.id,
'bulk_import_id' => entity.bulk_import_id,
- 'bulk_import_entity_type' => entity.source_type,
- 'source_full_path' => entity.source_full_path,
'pipeline_step' => :extractor,
'pipeline_class' => 'BulkImports::MyPipeline',
'exception.class' => exception_class,
@@ -69,8 +67,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
'correlation_id' => anything,
'class' => 'BulkImports::MyPipeline',
'message' => 'An object of a pipeline failed to import',
- 'exception.backtrace' => anything,
- 'source_version' => entity.bulk_import.source_version_info.to_s
+ 'exception.backtrace' => anything
)
)
end
@@ -94,6 +91,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
it 'logs a warn message and marks entity and tracker as failed' do
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:warn)
.with(
log_params(
@@ -195,8 +193,11 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
end
expect(subject).to receive(:on_finish)
+ expect(context.bulk_import).to receive(:touch)
+ expect(context.entity).to receive(:touch)
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:info)
.with(
log_params(
@@ -305,14 +306,14 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
allow(extractor).to receive(:extract).with(context).and_raise(
BulkImports::NetworkError.new(
'Net::ReadTimeout',
- response: instance_double(HTTParty::Response, code: reponse_status_code, headers: {})
+ response: instance_double(HTTParty::Response, code: response_status_code, headers: {})
)
)
end
end
context 'when exception is retriable' do
- let(:reponse_status_code) { 429 }
+ let(:response_status_code) { 429 }
it 'raises the exception BulkImports::RetryPipelineError' do
expect { subject.run }.to raise_error(BulkImports::RetryPipelineError)
@@ -320,7 +321,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
end
context 'when exception is not retriable' do
- let(:reponse_status_code) { 503 }
+ let(:response_status_code) { 505 }
it_behaves_like 'failed pipeline', 'BulkImports::NetworkError', 'Net::ReadTimeout'
end
@@ -417,6 +418,7 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
entity.fail_op!
expect_next_instance_of(BulkImports::Logger) do |logger|
+ expect(logger).to receive(:with_entity).with(context.entity).and_call_original
expect(logger).to receive(:warn)
.with(
log_params(
@@ -434,10 +436,6 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
def log_params(context, extra = {})
{
bulk_import_id: context.bulk_import_id,
- bulk_import_entity_id: context.entity.id,
- bulk_import_entity_type: context.entity.source_type,
- source_full_path: entity.source_full_path,
- source_version: context.entity.bulk_import.source_version_info.to_s,
context_extra: context.extra
}.merge(extra)
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
index e2b99fe4db4..96247329cc2 100644
--- a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
+
let_it_be(:entity) do
create(
:bulk_import_entity,
@@ -19,250 +19,55 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- let(:issue) { create(:issue, project: project, description: 'https://my.gitlab.com/source/full/path/-/issues/1') }
- let(:mr) do
- create(
- :merge_request,
- source_project: project,
- description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!'
- )
- end
- let(:issue_note) do
- create(
- :note,
- project: project,
- noteable: issue,
- note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.'
- )
- end
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:issue_note) { create(:note, noteable: issue, project: project) }
+ let_it_be(:merge_request_note) { create(:note, noteable: merge_request, project: project) }
+ let_it_be(:system_note) { create(:note, project: project, system: true, noteable: issue) }
- let(:mr_note) do
- create(
- :note,
- project: project,
- noteable: mr,
- note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username'
- )
- end
+ let_it_be(:random_project) { create(:project) }
+ let_it_be(:random_issue) { create(:issue, project: random_project) }
+ let_it_be(:random_merge_request) { create(:merge_request, source_project: random_project) }
+ let_it_be(:random_issue_note) { create(:note, noteable: random_issue, project: random_project) }
+ let_it_be(:random_mr_note) { create(:note, noteable: random_merge_request, project: random_project) }
+ let_it_be(:random_system_note) { create(:note, system: true, noteable: random_issue, project: random_project) }
- let(:interchanged_usernames) do
- create(
- :note,
- project: project,
- noteable: mr,
- note: '@manuelgrabowski-admin, @boaty-mc-boatface'
- )
- end
-
- let(:old_note_html) { 'old note_html' }
- let(:system_note) do
- create(
- :note,
- project: project,
- system: true,
- noteable: issue,
- note: "mentioned in merge request !#{mr.iid} created by @old_username",
- note_html: old_note_html
- )
- end
-
- let(:username_system_note) do
- create(
- :note,
- project: project,
- system: true,
- noteable: issue,
- note: "mentioned in merge request created by @source_username.",
- note_html: 'empty'
- )
- end
+ let(:delay) { described_class::DELAY }
subject(:pipeline) { described_class.new(context) }
- before do
- project.add_owner(user)
-
- allow(Gitlab::Cache::Import::Caching)
- .to receive(:values_from_hash)
- .and_return({
- 'old_username' => 'new_username',
- 'older_username' => 'newer_username',
- 'source_username' => 'destination_username',
- 'bob' => 'alice-gdk',
- 'alice' => 'bob-gdk',
- 'manuelgrabowski' => 'manuelgrabowski-admin',
- 'manuelgrabowski-admin' => 'manuelgrabowski',
- 'boaty-mc-boatface' => 'boatymcboatface',
- 'boatymcboatface' => 'boaty-mc-boatface'
- })
- end
-
- def create_project_data
- [issue, mr, issue_note, mr_note, system_note, username_system_note]
- end
-
- def create_username_project_data
- [username_system_note]
- end
-
- describe '#extract' do
- it 'returns ExtractedData containing issues, mrs & their notes' do
- create_project_data
+ describe '#run' do
+ it "enqueues TransformReferencesWorker for the project's issues, mrs and their notes" do
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay, [issue.id], 'Issue', tracker.id)
- extracted_data = subject.extract(context)
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay * 2, array_including([issue_note.id, system_note.id]), 'Note', tracker.id)
- expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
- expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note)
- expect(system_note.note_html).not_to eq(old_note_html)
- expect(system_note.note_html)
- .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a>")
- .and include(project.full_path.to_s)
- .and include("@old_username")
- expect(username_system_note.note_html)
- .to include("@source_username")
- end
-
- context 'when object body is nil' do
- let(:issue) { create(:issue, project: project, description: nil) }
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay * 3, [merge_request.id], 'MergeRequest', tracker.id)
- it 'returns ExtractedData not containing the object' do
- extracted_data = subject.extract(context)
+ expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)
+ .with(delay * 4, [merge_request_note.id], 'Note', tracker.id)
- expect(extracted_data.data).to contain_exactly(issue_note, mr, mr_note)
- end
+ subject.run
end
- end
-
- describe '#transform', :clean_gitlab_redis_cache do
- it 'updates matching urls and usernames with new ones' do
- transformed_mr = subject.transform(context, mr)
- transformed_note = subject.transform(context, mr_note)
- transformed_issue = subject.transform(context, issue)
- transformed_issue_note = subject.transform(context, issue_note)
- transformed_system_note = subject.transform(context, system_note)
- transformed_username_system_note = subject.transform(context, username_system_note)
-
- expected_url = URI('')
- expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
- expected_url.host = ::Gitlab.config.gitlab.host
- expected_url.port = ::Gitlab.config.gitlab.port
- expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}"
- expect(transformed_issue_note.note).not_to include("@older_username")
- expect(transformed_mr.description).not_to include("@source_username")
- expect(transformed_system_note.note).not_to include("@old_username")
- expect(transformed_username_system_note.note).not_to include("@source_username")
+ it 'does not enqueue objects that do not belong to the project' do
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, [random_issue.id], 'Issue', tracker.id)
- expect(transformed_issue.description)
- .to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
- expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
- expect(transformed_note.note).to eq("#{expected_url} @same_username")
- expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
- expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username")
- expect(transformed_username_system_note.note).to include("@destination_username.")
- end
-
- it 'handles situations where old usernames are substrings of new usernames' do
- transformed_mr = subject.transform(context, mr)
-
- expect(transformed_mr.description).to include("@alice-gdk")
- expect(transformed_mr.description).not_to include("@bob-gdk-gdk")
- end
-
- it 'handles situations where old and new usernames are interchanged' do
- # e.g
- # |------------------------|-------------------------|
- # | old_username | new_username |
- # |------------------------|-------------------------|
- # | @manuelgrabowski-admin | @manuelgrabowski |
- # | @manuelgrabowski | @manuelgrabowski-admin |
- # |------------------------|-------------------------|
-
- transformed_interchanged_usernames = subject.transform(context, interchanged_usernames)
-
- expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski")
- expect(transformed_interchanged_usernames.note).to include("@boatymcboatface")
- expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin")
- expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface")
- end
-
- context 'when object does not have reference or username' do
- it 'returns object unchanged' do
- issue.update!(description: 'foo')
-
- transformed_issue = subject.transform(context, issue)
-
- expect(transformed_issue.description).to eq('foo')
- end
- end
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, array_including([random_issue_note.id, random_system_note.id]), 'Note', tracker.id)
- context 'when there are not matched urls or usernames' do
- let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
-
- shared_examples 'returns object unchanged' do
- it 'returns object unchanged' do
- issue.update!(description: description)
-
- transformed_issue = subject.transform(context, issue)
-
- expect(transformed_issue.description).to eq(description)
- end
- end
-
- include_examples 'returns object unchanged'
-
- context 'when url path does not start with source full path' do
- let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
-
- include_examples 'returns object unchanged'
- end
-
- context 'when host does not match and url path starts with source full path' do
- let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
-
- include_examples 'returns object unchanged'
- end
-
- context 'when url does not match at all' do
- let(:description) { 'https://website.example/foo/bar' }
-
- include_examples 'returns object unchanged'
- end
- end
- end
-
- describe '#load' do
- it 'saves the object when object body changed' do
- transformed_issue = subject.transform(context, issue)
- transformed_note = subject.transform(context, mr_note)
- transformed_mr = subject.transform(context, mr)
- transformed_issue_note = subject.transform(context, issue_note)
- transformed_system_note = subject.transform(context, system_note)
-
- expect(transformed_issue).to receive(:save!)
- expect(transformed_note).to receive(:save!)
- expect(transformed_mr).to receive(:save!)
- expect(transformed_issue_note).to receive(:save!)
- expect(transformed_system_note).to receive(:save!)
-
- subject.load(context, transformed_issue)
- subject.load(context, transformed_note)
- subject.load(context, transformed_mr)
- subject.load(context, transformed_issue_note)
- subject.load(context, transformed_system_note)
- end
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, [random_merge_request.id], 'MergeRequest', tracker.id)
- context 'when object body is not changed' do
- it 'does not save the object' do
- expect(mr).not_to receive(:save!)
- expect(mr_note).not_to receive(:save!)
- expect(system_note).not_to receive(:save!)
+ expect(BulkImports::TransformReferencesWorker).not_to receive(:perform_in)
+ .with(anything, [random_mr_note.id], 'Note', tracker.id)
- subject.load(context, mr)
- subject.load(context, mr_note)
- subject.load(context, system_note)
- end
+ subject.run
end
end
end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index fc670d10655..8b06e0bb2ee 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Projects::Stage do
+RSpec.describe BulkImports::Projects::Stage, feature_category: :importers do
subject do
entity = build(:bulk_import_entity, :project_entity)
@@ -15,7 +15,8 @@ RSpec.describe BulkImports::Projects::Stage do
expect(pipelines).to include(
hash_including({ stage: 0, pipeline: BulkImports::Projects::Pipelines::ProjectPipeline }),
- hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline })
+ hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline }),
+ hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline })
)
expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
end
diff --git a/spec/lib/click_house/connection_spec.rb b/spec/lib/click_house/connection_spec.rb
new file mode 100644
index 00000000000..dda736dfaa8
--- /dev/null
+++ b/spec/lib/click_house/connection_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::Connection, click_house: :without_migrations, feature_category: :database do
+ let(:connection) { described_class.new(:main) }
+
+ describe '#select' do
+ it 'proxies select to client' do
+ expect(
+ connection.select('SELECT 1')
+ ).to eq([{ '1' => 1 }])
+ end
+ end
+
+ describe '#execute' do
+ it 'proxies execute to client' do
+ create_test_table
+
+ connection.execute(
+ <<~SQL
+ INSERT INTO test_table VALUES (1), (2), (3)
+ SQL
+ )
+
+ expect(connection.select('SELECT id FROM test_table')).to eq(
+ [{ 'id' => 1 }, { 'id' => 2 }, { 'id' => 3 }]
+ )
+ end
+ end
+
+ describe '#table_exists?' do
+ it "return false when table doesn't exist" do
+ expect(connection.table_exists?('test_table')).to eq(false)
+ end
+
+ it 'returns true when table exists' do
+ create_test_table
+
+ expect(connection.table_exists?('test_table')).to eq(true)
+ end
+ end
+
+ def create_test_table
+ connection.execute(
+ <<~SQL
+ CREATE TABLE test_table (
+ id UInt64
+ ) ENGINE = MergeTree
+ PRIMARY KEY(id)
+ SQL
+ )
+ end
+end
diff --git a/spec/lib/click_house/iterator_spec.rb b/spec/lib/click_house/iterator_spec.rb
new file mode 100644
index 00000000000..fd054c0afe5
--- /dev/null
+++ b/spec/lib/click_house/iterator_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::Iterator, :click_house, feature_category: :database do
+ let(:query_builder) { ClickHouse::QueryBuilder.new('event_authors') }
+ let(:connection) { ClickHouse::Connection.new(:main) }
+ let(:iterator) { described_class.new(query_builder: query_builder, connection: connection) }
+
+ before do
+ connection.execute('INSERT INTO event_authors (author_id) SELECT number + 1 FROM numbers(10)')
+ end
+
+ def collect_ids_with_batch_size(of)
+ [].tap do |ids|
+ iterator.each_batch(column: :author_id, of: of) do |scope|
+ query = scope.select(Arel.sql('DISTINCT author_id')).to_sql
+ ids.concat(connection.select(query).pluck('author_id'))
+ end
+ end
+ end
+
+ it 'iterates correctly' do
+ expected_values = (1..10).to_a
+
+ expect(collect_ids_with_batch_size(3)).to match_array(expected_values)
+ expect(collect_ids_with_batch_size(5)).to match_array(expected_values)
+ expect(collect_ids_with_batch_size(10)).to match_array(expected_values)
+ expect(collect_ids_with_batch_size(15)).to match_array(expected_values)
+ end
+
+ context 'when there are no records for the given query' do
+ let(:query_builder) do
+ ClickHouse::QueryBuilder
+ .new('event_authors')
+ .where(author_id: 0)
+ end
+
+ it 'returns no data' do
+ expect(collect_ids_with_batch_size(3)).to match_array([])
+ end
+ end
+end
diff --git a/spec/lib/click_house/migration_support/exclusive_lock_spec.rb b/spec/lib/click_house/migration_support/exclusive_lock_spec.rb
new file mode 100644
index 00000000000..5176cc75266
--- /dev/null
+++ b/spec/lib/click_house/migration_support/exclusive_lock_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::MigrationSupport::ExclusiveLock, feature_category: :database do
+ include ExclusiveLeaseHelpers
+
+ let(:worker_class) do
+ # This worker will be active longer than the ClickHouse worker TTL
+ Class.new do
+ def self.name
+ 'TestWorker'
+ end
+
+ include ::ApplicationWorker
+ include ::ClickHouseWorker
+
+ def perform(*); end
+ end
+ end
+
+ before do
+ stub_const('TestWorker', worker_class)
+ end
+
+ describe '.register_running_worker' do
+ before do
+ TestWorker.click_house_migration_lock(10.seconds)
+ end
+
+ it 'yields without arguments' do
+ expect { |b| described_class.register_running_worker(worker_class, 'test', &b) }.to yield_with_no_args
+ end
+
+ it 'registers worker for a limited period of time', :freeze_time, :aggregate_failures do
+ expect(described_class.active_sidekiq_workers?).to eq false
+
+ described_class.register_running_worker(worker_class, 'test') do
+ expect(described_class.active_sidekiq_workers?).to eq true
+ travel 9.seconds
+ expect(described_class.active_sidekiq_workers?).to eq true
+ travel 2.seconds
+ expect(described_class.active_sidekiq_workers?).to eq false
+ end
+ end
+ end
+
+ describe '.pause_workers?' do
+ subject(:pause_workers?) { described_class.pause_workers? }
+
+ it { is_expected.to eq false }
+
+ context 'with lock taken' do
+ let!(:lease) { stub_exclusive_lease_taken(described_class::MIGRATION_LEASE_KEY) }
+
+ it { is_expected.to eq true }
+ end
+ end
+
+ describe '.execute_migration' do
+ it 'yields without raising error' do
+ expect { |b| described_class.execute_migration(&b) }.to yield_with_no_args
+ end
+
+ context 'when migration lock is taken' do
+ let!(:lease) { stub_exclusive_lease_taken(described_class::MIGRATION_LEASE_KEY) }
+
+ it 'raises LockError' do
+ expect do
+ expect { |b| described_class.execute_migration(&b) }.not_to yield_control
+ end.to raise_error ::ClickHouse::MigrationSupport::Errors::LockError
+ end
+ end
+
+ context 'when ClickHouse workers are still active', :freeze_time do
+ let(:sleep_time) { described_class::WORKERS_WAIT_SLEEP }
+ let!(:started_at) { Time.current }
+
+ def migration
+ expect { |b| described_class.execute_migration(&b) }.to yield_with_no_args
+ end
+
+ around do |example|
+ described_class.register_running_worker(worker_class, anything) do
+ example.run
+ end
+ end
+
+ it 'waits for workers and raises ClickHouse::MigrationSupport::LockError if workers do not stop in time' do
+ expect(described_class).to receive(:sleep).at_least(1).with(sleep_time) { travel(sleep_time) }
+
+ expect { migration }.to raise_error(ClickHouse::MigrationSupport::Errors::LockError,
+ /Timed out waiting for active workers/)
+ expect(Time.current - started_at).to eq(described_class::DEFAULT_CLICKHOUSE_WORKER_TTL)
+ end
+
+ context 'when wait_for_clickhouse_workers_during_migration FF is disabled' do
+ before do
+ stub_feature_flags(wait_for_clickhouse_workers_during_migration: false)
+ end
+
+ it 'runs migration without waiting for workers' do
+ expect { migration }.not_to raise_error
+ expect(Time.current - started_at).to eq(0.0)
+ end
+ end
+
+ it 'ignores expired workers' do
+ travel(described_class::DEFAULT_CLICKHOUSE_WORKER_TTL + 1.second)
+
+ migration
+ end
+
+ context 'when worker registration is almost expiring' do
+ let(:worker_class) do
+ # This worker will be active for less than the ClickHouse worker TTL
+ Class.new do
+ def self.name
+ 'TestWorker'
+ end
+
+ include ::ApplicationWorker
+ include ::ClickHouseWorker
+
+ click_house_migration_lock(
+ ClickHouse::MigrationSupport::ExclusiveLock::DEFAULT_CLICKHOUSE_WORKER_TTL - 1.second)
+
+ def perform(*); end
+ end
+ end
+
+ it 'completes migration' do
+ expect(described_class).to receive(:sleep).at_least(1).with(sleep_time) { travel(sleep_time) }
+
+ expect { migration }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/click_house/migration_support/migration_context_spec.rb b/spec/lib/click_house/migration_support/migration_context_spec.rb
new file mode 100644
index 00000000000..0f70e1e3f94
--- /dev/null
+++ b/spec/lib/click_house/migration_support/migration_context_spec.rb
@@ -0,0 +1,203 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
+ click_house: :without_migrations, feature_category: :database do
+ include ClickHouseTestHelpers
+
+ # We don't need to delete data since we don't modify Postgres data
+ self.use_transactional_tests = false
+
+ let(:connection) { ::ClickHouse::Connection.new(:main) }
+ let(:schema_migration) { ClickHouse::MigrationSupport::SchemaMigration.new(connection) }
+
+ let(:migrations_base_dir) { 'click_house/migrations' }
+ let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
+ let(:migration_context) { described_class.new(connection, migrations_dir, schema_migration) }
+ let(:target_version) { nil }
+
+ after do
+ unload_click_house_migration_classes(expand_fixture_path(migrations_base_dir))
+ end
+
+ describe 'performs migrations' do
+ include ExclusiveLeaseHelpers
+
+ subject(:migration) { migrate(migration_context, target_version) }
+
+ describe 'when creating a table' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:lease_key) { 'click_house:migrations' }
+ let(:lease_timeout) { 1.hour }
+
+ it 'executes migration through ClickHouse::MigrationSupport::ExclusiveLock.execute_migration' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock).to receive(:execute_migration)
+
+ # Test that not running execute_migration will not execute migrations
+ expect { migration }.not_to change { active_schema_migrations_count }
+ end
+
+ it 'creates a table' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock).to receive(:execute_migration).and_call_original
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
+
+ table_schema = describe_table('some')
+ expect(schema_migrations).to contain_exactly(a_hash_including(version: '1', active: 1))
+ expect(table_schema).to match({
+ id: a_hash_including(type: 'UInt64'),
+ date: a_hash_including(type: 'Date')
+ })
+ end
+
+ context 'when a migration is already running' do
+ let(:migration_name) { 'create_some_table' }
+
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
+
+ it 'raises error after timeout when migration is executing concurrently' do
+ expect { migration }.to raise_error(ClickHouse::MigrationSupport::Errors::LockError)
+ .and not_change { active_schema_migrations_count }
+ end
+ end
+ end
+
+ describe 'when dropping a table' do
+ let(:migrations_dirname) { 'drop_table' }
+ let(:target_version) { 2 }
+
+ it 'drops table' do
+ migrate(migration_context, 1)
+ expect(table_names).to include('some')
+
+ migration
+ expect(table_names).not_to include('some')
+ end
+ end
+
+ context 'when a migration raises an error' do
+ let(:migrations_dirname) { 'migration_with_error' }
+
+ it 'passes the error to caller as a StandardError' do
+ expect { migration }.to raise_error StandardError,
+ "An error has occurred, all later migrations canceled:\n\nA migration error happened"
+ expect(schema_migrations).to be_empty
+ end
+ end
+
+ context 'when connecting to not-existing database' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:connection) { ::ClickHouse::Connection.new(:unknown_database) }
+
+ it 'raises ConfigurationError' do
+ expect { migration }.to raise_error ClickHouse::Client::ConfigurationError,
+ "The database 'unknown_database' is not configured"
+ end
+ end
+
+ context 'when target_version is incorrect' do
+ let(:target_version) { 2 }
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'raises UnknownMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::UnknownMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+
+ context 'when migrations with duplicate name exist' do
+ let(:migrations_dirname) { 'duplicate_name' }
+
+ it 'raises DuplicateMigrationNameError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::DuplicateMigrationNameError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+
+ context 'when migrations with duplicate version exist' do
+ let(:migrations_dirname) { 'duplicate_version' }
+
+ it 'raises DuplicateMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::DuplicateMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 0
+ end
+ end
+ end
+
+ describe 'performs rollbacks' do
+ subject(:migration) { rollback(migration_context, target_version) }
+
+ before do
+ # Ensure that all migrations are up
+ migrate(migration_context, nil)
+ end
+
+ context 'when down method is present' do
+ let(:migrations_dirname) { 'table_creation_with_down_method' }
+
+ context 'when specifying target_version' do
+ it 'removes migrations and performs down method' do
+ expect(table_names).to include('some', 'another')
+
+ # test that target_version is prioritized over step
+ expect { rollback(migration_context, 1, 10000) }.to change { active_schema_migrations_count }.from(2).to(1)
+ expect(table_names).not_to include('another')
+ expect(table_names).to include('some')
+ expect(schema_migrations).to contain_exactly(
+ a_hash_including(version: '1', active: 1),
+ a_hash_including(version: '2', active: 0)
+ )
+
+ expect { rollback(migration_context, nil) }.to change { active_schema_migrations_count }.to(0)
+ expect(table_names).not_to include('some', 'another')
+
+ expect(schema_migrations).to contain_exactly(
+ a_hash_including(version: '1', active: 0),
+ a_hash_including(version: '2', active: 0)
+ )
+ end
+ end
+
+ context 'when specifying step' do
+ it 'removes migrations and performs down method' do
+ expect(table_names).to include('some', 'another')
+
+ expect { rollback(migration_context, nil, 1) }.to change { active_schema_migrations_count }.from(2).to(1)
+ expect(table_names).not_to include('another')
+ expect(table_names).to include('some')
+
+ expect { rollback(migration_context, nil, 2) }.to change { active_schema_migrations_count }.to(0)
+ expect(table_names).not_to include('some', 'another')
+ end
+ end
+ end
+
+ context 'when down method is missing' do
+ let(:migrations_dirname) { 'plain_table_creation' }
+ let(:target_version) { 0 }
+
+ it 'removes migration ignoring missing down method' do
+ expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
+ .and not_change { table_names & %w[some] }.from(%w[some])
+ end
+ end
+
+ context 'when target_version is incorrect' do
+ let(:target_version) { -1 }
+ let(:migrations_dirname) { 'plain_table_creation' }
+
+ it 'raises UnknownMigrationVersionError' do
+ expect { migration }.to raise_error ClickHouse::MigrationSupport::Errors::UnknownMigrationVersionError
+
+ expect(active_schema_migrations_count).to eq 1
+ end
+ end
+ end
+end
diff --git a/spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb b/spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb
new file mode 100644
index 00000000000..03c9edfabaa
--- /dev/null
+++ b/spec/lib/click_house/migration_support/sidekiq_middleware_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::MigrationSupport::SidekiqMiddleware, feature_category: :database do
+ let(:worker_with_click_house_worker) do
+ Class.new do
+ def self.name
+ 'TestWorker'
+ end
+ include ApplicationWorker
+ include ClickHouseWorker
+ end
+ end
+
+ let(:worker_without_click_house_worker) do
+ Class.new do
+ def self.name
+ 'TestWorkerWithoutClickHouseWorker'
+ end
+ include ApplicationWorker
+ end
+ end
+
+ subject(:middleware) { described_class.new }
+
+ before do
+ stub_const('TestWorker', worker_with_click_house_worker)
+ stub_const('TestWorkerWithoutClickHouseWorker', worker_without_click_house_worker)
+ end
+
+ describe '#call' do
+ let(:worker) { worker_class.new }
+ let(:job) { { 'jid' => 123, 'class' => worker_class.name } }
+ let(:queue) { 'test_queue' }
+
+ context 'when worker does not include ClickHouseWorker' do
+ let(:worker_class) { worker_without_click_house_worker }
+
+ it 'yields control without registering running worker' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock).not_to receive(:register_running_worker)
+ expect { |b| middleware.call(worker, job, queue, &b) }.to yield_with_no_args
+ end
+ end
+
+ context 'when worker includes ClickHouseWorker' do
+ let(:worker_class) { worker_with_click_house_worker }
+
+ it 'registers running worker and yields control' do
+ expect(ClickHouse::MigrationSupport::ExclusiveLock)
+ .to receive(:register_running_worker)
+ .with(worker_class, 'test_queue:123')
+ .and_wrap_original do |method, worker_class, worker_id|
+ expect { |b| method.call(worker_class, worker_id, &b) }.to yield_with_no_args
+ end
+
+ middleware.call(worker, job, queue)
+ end
+ end
+ end
+end
diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb
index 9ff11899e89..c7186011654 100644
--- a/spec/lib/extracts_ref_spec.rb
+++ b/spec/lib/extracts_ref_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ExtractsRef do
+RSpec.describe ExtractsRef, feature_category: :source_code_management do
include described_class
include RepoHelpers
@@ -98,6 +98,12 @@ RSpec.describe ExtractsRef do
it { is_expected.to eq(nil) }
end
+
+ context 'when ref_type is a hash' do
+ let(:ref_type) { { 'just' => 'hash' } }
+
+ it { is_expected.to eq(nil) }
+ end
end
it_behaves_like 'extracts refs'
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 7860d85457a..64c249770b7 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -5,1069 +5,1123 @@ require 'spec_helper'
RSpec.describe Feature, :clean_gitlab_redis_feature_flag, stub_feature_flags: false, feature_category: :shared do
include StubVersion
- before do
- # reset Flipper AR-engine
- Feature.reset
- skip_feature_flags_yaml_validation
- end
+ # Pick a long-lasting real feature flag to test that we can check feature flags in the load balancer
+ let(:load_balancer_test_feature_flag) { :require_email_verification }
- describe '.current_request' do
- it 'returns a FlipperRequest with a flipper_id' do
- flipper_request = described_class.current_request
+ where(:bypass_load_balancer) { [true, false] }
- expect(flipper_request.flipper_id).to include("FlipperRequest:")
+ with_them do
+ def wrap_all_methods_with_flag_check(lb, flag)
+ lb.methods(false).each do |meth|
+ allow(lb).to receive(meth).and_wrap_original do |m, *args, **kwargs, &block|
+ Feature.enabled?(flag)
+ m.call(*args, **kwargs, &block)
+ end
+ end
end
-
- context 'when request store is inactive' do
- it 'does not cache flipper_id' do
- previous_id = described_class.current_request.flipper_id
-
- expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ before do
+ if bypass_load_balancer
+ stub_env(Feature::BypassLoadBalancer::FLAG, 'true')
+ wrap_all_methods_with_flag_check(ApplicationRecord.load_balancer, load_balancer_test_feature_flag)
end
+
+ # reset Flipper AR-engine
+ Feature.reset
+ skip_feature_flags_yaml_validation
end
- context 'when request store is active', :request_store do
- it 'caches flipper_id when request store is active' do
- previous_id = described_class.current_request.flipper_id
+ describe '.current_request' do
+ it 'returns a FlipperRequest with a flipper_id' do
+ flipper_request = described_class.current_request
- expect(described_class.current_request.flipper_id).to eq(previous_id)
+ expect(flipper_request.flipper_id).to include("FlipperRequest:")
end
- it 'returns a new flipper_id when request ends' do
- previous_id = described_class.current_request.flipper_id
-
- RequestStore.end!
+ context 'when request store is inactive' do
+ it 'does not cache flipper_id' do
+ previous_id = described_class.current_request.flipper_id
- expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
end
- end
- end
- describe '.get' do
- let(:feature) { double(:feature) }
- let(:key) { 'my_feature' }
+ context 'when request store is active', :request_store do
+ it 'caches flipper_id when request store is active' do
+ previous_id = described_class.current_request.flipper_id
- it 'returns the Flipper feature' do
- expect_any_instance_of(Flipper::DSL).to receive(:feature).with(key)
- .and_return(feature)
+ expect(described_class.current_request.flipper_id).to eq(previous_id)
+ end
- expect(described_class.get(key)).to eq(feature)
- end
- end
+ it 'returns a new flipper_id when request ends' do
+ previous_id = described_class.current_request.flipper_id
- describe '.persisted_names' do
- it 'returns the names of the persisted features' do
- Feature.enable('foo')
+ RequestStore.end!
- expect(described_class.persisted_names).to contain_exactly('foo')
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
+ end
end
- it 'returns an empty Array when no features are presisted' do
- expect(described_class.persisted_names).to be_empty
- end
+ describe '.gitlab_instance' do
+ it 'returns a FlipperGitlabInstance with a flipper_id' do
+ flipper_request = described_class.gitlab_instance
- it 'caches the feature names when request store is active',
- :request_store, :use_clean_rails_memory_store_caching do
- Feature.enable('foo')
+ expect(flipper_request.flipper_id).to include("FlipperGitlabInstance:")
+ end
- expect(Gitlab::ProcessMemoryCache.cache_backend)
- .to receive(:fetch)
- .once
- .with('flipper/v1/features', { expires_in: 1.minute })
- .and_call_original
+ it 'caches flipper_id' do
+ previous_id = described_class.gitlab_instance.flipper_id
- 2.times do
- expect(described_class.persisted_names).to contain_exactly('foo')
+ expect(described_class.gitlab_instance.flipper_id).to eq(previous_id)
end
end
- it 'fetches all flags once in a single query', :request_store do
- Feature.enable('foo1')
- Feature.enable('foo2')
-
- queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do
- expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ describe '.get' do
+ let(:feature) { double(:feature) }
+ let(:key) { 'my_feature' }
- RequestStore.clear!
+ it 'returns the Flipper feature' do
+ expect_any_instance_of(Flipper::DSL).to receive(:feature).with(key)
+ .and_return(feature)
- expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ expect(described_class.get(key)).to eq(feature)
end
-
- expect(queries.count).to eq(1)
end
- end
- describe '.persisted_name?' do
- context 'when the feature is persisted' do
- it 'returns true when feature name is a string' do
+ describe '.persisted_names' do
+ it 'returns the names of the persisted features' do
Feature.enable('foo')
- expect(described_class.persisted_name?('foo')).to eq(true)
+ expect(described_class.persisted_names).to contain_exactly('foo')
end
- it 'returns true when feature name is a symbol' do
+ it 'returns an empty Array when no features are presisted' do
+ expect(described_class.persisted_names).to be_empty
+ end
+
+ it 'caches the feature names when request store is active',
+ :request_store, :use_clean_rails_memory_store_caching do
Feature.enable('foo')
- expect(described_class.persisted_name?(:foo)).to eq(true)
- end
- end
+ expect(Gitlab::ProcessMemoryCache.cache_backend)
+ .to receive(:fetch)
+ .once
+ .with('flipper/v1/features', { expires_in: 1.minute })
+ .and_call_original
- context 'when the feature is not persisted' do
- it 'returns false when feature name is a string' do
- expect(described_class.persisted_name?('foo')).to eq(false)
+ 2.times do
+ expect(described_class.persisted_names).to contain_exactly('foo')
+ end
end
- it 'returns false when feature name is a symbol' do
- expect(described_class.persisted_name?(:bar)).to eq(false)
- end
- end
- end
+ it 'fetches all flags once in a single query', :request_store do
+ Feature.enable('foo1')
+ Feature.enable('foo2')
- describe '.all' do
- let(:features) { Set.new }
+ queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
- it 'returns the Flipper features as an array' do
- expect_any_instance_of(Flipper::DSL).to receive(:features)
- .and_return(features)
+ RequestStore.clear!
- expect(described_class.all).to eq(features.to_a)
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ end
+
+ expect(queries.count).to eq(1)
+ end
end
- end
- describe '.flipper' do
- context 'when request store is inactive' do
- it 'memoizes the Flipper instance but does not not enable Flipper memoization' do
- expect(Flipper).to receive(:new).once.and_call_original
+ describe '.persisted_name?' do
+ context 'when the feature is persisted' do
+ it 'returns true when feature name is a string' do
+ Feature.enable('foo')
- 2.times do
- described_class.flipper
+ expect(described_class.persisted_name?('foo')).to eq(true)
end
- expect(described_class.flipper.adapter.memoizing?).to eq(false)
- end
- end
+ it 'returns true when feature name is a symbol' do
+ Feature.enable('foo')
- context 'when request store is active', :request_store do
- it 'memoizes the Flipper instance' do
- expect(Flipper).to receive(:new).once.and_call_original
+ expect(described_class.persisted_name?(:foo)).to eq(true)
+ end
+ end
- described_class.flipper
- described_class.instance_variable_set(:@flipper, nil)
- described_class.flipper
+ context 'when the feature is not persisted' do
+ it 'returns false when feature name is a string' do
+ expect(described_class.persisted_name?('foo')).to eq(false)
+ end
- expect(described_class.flipper.adapter.memoizing?).to eq(true)
+ it 'returns false when feature name is a symbol' do
+ expect(described_class.persisted_name?(:bar)).to eq(false)
+ end
end
end
- end
- describe '.enabled?' do
- before do
- allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ describe '.all' do
+ let(:features) { Set.new }
- stub_feature_flag_definition(:disabled_feature_flag)
- stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true)
- end
+ it 'returns the Flipper features as an array' do
+ expect_any_instance_of(Flipper::DSL).to receive(:features)
+ .and_return(features)
- context 'when using redis cache', :use_clean_rails_redis_caching do
- it 'does not make recursive feature-flag calls' do
- expect(described_class).to receive(:enabled?).once.and_call_original
- described_class.enabled?(:disabled_feature_flag)
+ expect(described_class.all).to eq(features.to_a)
end
end
- context 'when self-recursive' do
- before do
- allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
- original.call(name) do |ff|
- Feature.enabled?(name)
- block.call(ff)
+ describe '.flipper' do
+ context 'when request store is inactive' do
+ it 'memoizes the Flipper instance but does not not enable Flipper memoization' do
+ expect(Flipper).to receive(:new).once.and_call_original
+
+ 2.times do
+ described_class.flipper
end
+
+ expect(described_class.flipper.adapter.memoizing?).to eq(false)
end
end
- it 'returns the default value' do
- expect(described_class.enabled?(:enabled_feature_flag)).to eq true
- end
+ context 'when request store is active', :request_store do
+ it 'memoizes the Flipper instance' do
+ expect(Flipper).to receive(:new).once.and_call_original
- it 'detects self recursion' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] })
+ described_class.flipper
+ described_class.instance_variable_set(:@flipper, nil)
+ described_class.flipper
- described_class.enabled?(:enabled_feature_flag)
+ expect(described_class.flipper.adapter.memoizing?).to eq(true)
+ end
end
end
- context 'when deeply recursive' do
+ describe '.enabled?' do
before do
- allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
- original.call(name) do |ff|
- Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true)
- block.call(ff)
- end
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+
+ stub_feature_flag_definition(:disabled_feature_flag)
+ stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true)
+ end
+
+ context 'when using redis cache', :use_clean_rails_redis_caching do
+ it 'does not make recursive feature-flag calls' do
+ expect(described_class).to receive(:enabled?).once.and_call_original
+ described_class.enabled?(:disabled_feature_flag)
end
end
- it 'detects deep recursion' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10))
+ context 'when self-recursive' do
+ before do
+ allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
+ original.call(name) do |ff|
+ Feature.enabled?(name)
+ block.call(ff)
+ end
+ end
+ end
- described_class.enabled?(:enabled_feature_flag)
- end
- end
+ it 'returns the default value' do
+ expect(described_class.enabled?(:enabled_feature_flag)).to eq true
+ end
- it 'returns false (and tracks / raises exception for dev) for undefined feature' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ it 'detects self recursion' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] })
- expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
- end
+ described_class.enabled?(:enabled_feature_flag)
+ end
+ end
- it 'returns false for undefined feature with default_enabled_if_undefined: false' do
- expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_falsey
- end
+ context 'when deeply recursive' do
+ before do
+ allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
+ original.call(name) do |ff|
+ Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true)
+ block.call(ff)
+ end
+ end
+ end
- it 'returns true for undefined feature with default_enabled_if_undefined: true' do
- expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_truthy
- end
+ it 'detects deep recursion' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10))
- it 'returns false for existing disabled feature in the database' do
- described_class.disable(:disabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
+ end
- expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey
- end
+ it 'returns false (and tracks / raises exception for dev) for undefined feature' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- it 'returns true for existing enabled feature in the database' do
- described_class.enable(:enabled_feature_flag)
+ expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
+ end
- expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
- end
+ it 'returns false for undefined feature with default_enabled_if_undefined: false' do
+ expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_falsey
+ end
- it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
- it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) }
+ it 'returns true for undefined feature with default_enabled_if_undefined: true' do
+ expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_truthy
+ end
- it 'caches the status in L1 and L2 caches',
- :request_store, :use_clean_rails_memory_store_caching do
- described_class.enable(:disabled_feature_flag)
- flipper_key = "flipper/v1/feature/disabled_feature_flag"
+ it 'returns false for existing disabled feature in the database' do
+ described_class.disable(:disabled_feature_flag)
- expect(described_class.send(:l2_cache_backend))
- .to receive(:fetch)
- .once
- .with(flipper_key, { expires_in: 1.hour })
- .and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey
+ end
- expect(described_class.send(:l1_cache_backend))
- .to receive(:fetch)
- .once
- .with(flipper_key, { expires_in: 1.minute })
- .and_call_original
+ it 'returns true for existing enabled feature in the database' do
+ described_class.enable(:enabled_feature_flag)
- 2.times do
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
end
- end
- it 'returns the default value when the database does not exist' do
- fake_default = double('fake default')
- expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" }
+ it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
+ it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) }
- expect(described_class.enabled?(:a_feature, default_enabled_if_undefined: fake_default)).to eq(fake_default)
- end
+ it 'caches the status in L1 and L2 caches',
+ :request_store, :use_clean_rails_memory_store_caching do
+ described_class.enable(:disabled_feature_flag)
+ flipper_key = "flipper/v1/feature/disabled_feature_flag"
- context 'logging is enabled', :request_store do
- before do
- allow(Feature).to receive(:log_feature_flag_states?).and_call_original
+ expect(described_class.send(:l2_cache_backend))
+ .to receive(:fetch)
+ .once
+ .with(flipper_key, { expires_in: 1.hour })
+ .and_call_original
- stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true)
+ expect(described_class.send(:l1_cache_backend))
+ .to receive(:fetch)
+ .once
+ .with(flipper_key, { expires_in: 1.minute })
+ .and_call_original
- described_class.enable(:feature_flag_state_logs)
- described_class.enable(:enabled_feature_flag)
- described_class.enabled?(:enabled_feature_flag)
+ 2.times do
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end
end
- it 'does not log feature_flag_state_logs' do
- expect(described_class.logged_states).not_to have_key("feature_flag_state_logs")
- end
+ it 'returns the default value when the database does not exist' do
+ fake_default = double('fake default')
- it 'logs other feature flags' do
- expect(described_class.logged_states).to have_key(:enabled_feature_flag)
- expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy
- end
- end
+ base_class = Feature::BypassLoadBalancer.enabled? ? Feature::BypassLoadBalancer::FlipperRecord : ActiveRecord::Base
+ expect(base_class).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" }
- context 'cached feature flag', :request_store do
- before do
- described_class.send(:flipper).memoize = false
- described_class.enabled?(:disabled_feature_flag)
+ expect(described_class.enabled?(:a_feature, default_enabled_if_undefined: fake_default)).to eq(fake_default)
end
- it 'caches the status in L1 cache for the first minute' do
- expect do
- expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch)
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(0)
- end
+ context 'logging is enabled', :request_store do
+ before do
+ allow(Feature).to receive(:log_feature_flag_states?).and_call_original
- it 'caches the status in L2 cache after 2 minutes' do
- travel_to 2.minutes.from_now do
- expect do
- expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(0)
+ stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true)
+
+ described_class.enable(:feature_flag_state_logs)
+ described_class.enable(:enabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
+
+ it 'does not log feature_flag_state_logs' do
+ expect(described_class.logged_states).not_to have_key("feature_flag_state_logs")
+ end
+
+ it 'logs other feature flags' do
+ expect(described_class.logged_states).to have_key(:enabled_feature_flag)
+ expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy
end
end
- it 'fetches the status after an hour' do
- travel_to 61.minutes.from_now do
+ context 'cached feature flag', :request_store do
+ before do
+ described_class.send(:flipper).memoize = false
+ described_class.enabled?(:disabled_feature_flag)
+ end
+
+ it 'caches the status in L1 cache for the first minute' do
expect do
expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch)
expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(1)
+ end.not_to exceed_query_limit(0)
end
- end
- end
-
- context 'with current_request actor' do
- context 'when request store is inactive' do
- it 'returns the approximate percentage set' do
- number_of_times = 1_000
- percentage = 50
- described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage)
- gate_values = Array.new(number_of_times) do
- described_class.enabled?(:enabled_feature_flag, described_class.current_request)
+ it 'caches the status in L2 cache after 2 minutes' do
+ travel_to 2.minutes.from_now do
+ expect do
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end.not_to exceed_query_limit(0)
end
+ end
- margin_of_error = 0.05 * number_of_times
- expected_size = number_of_times * percentage / 100
- expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size)
+ it 'fetches the status after an hour' do
+ travel_to 61.minutes.from_now do
+ expect do
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end.not_to exceed_query_limit(1)
+ end
end
end
- context 'when request store is active', :request_store do
- it 'always returns the same gate value' do
- described_class.enable_percentage_of_actors(:enabled_feature_flag, 50)
+ [:current_request, :request, described_class.current_request].each do |thing|
+ context "with #{thing} actor" do
+ context 'when request store is inactive' do
+ it 'returns the approximate percentage set' do
+ number_of_times = 1_000
+ percentage = 50
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage)
- previous_gate_value = described_class.enabled?(:enabled_feature_flag, described_class.current_request)
+ gate_values = Array.new(number_of_times) do
+ described_class.enabled?(:enabled_feature_flag, thing)
+ end
- 1_000.times do
- expect(described_class.enabled?(:enabled_feature_flag, described_class.current_request)).to eq(previous_gate_value)
+ margin_of_error = 0.05 * number_of_times
+ expected_size = number_of_times * percentage / 100
+ expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size)
+ end
end
- end
- end
- end
- context 'with a group member' do
- let(:key) { :awesome_feature }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when request store is active', :request_store do
+ it 'always returns the same gate value' do
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, 50)
- before do
- described_class.reset
- stub_feature_flag_definition(key)
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ previous_gate_value = described_class.enabled?(:enabled_feature_flag, thing)
+
+ 1_000.times do
+ expect(described_class.enabled?(:enabled_feature_flag, thing)).to eq(previous_gate_value)
+ end
+ end
+ end
end
- described_class.enable(key, described_class.group(:guinea_pigs))
end
- it 'is true for all group members' do
- expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy
- expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy
- expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy
- end
+ context 'with gitlab_instance actor' do
+ it 'always returns the same gate value' do
+ described_class.enable(:enabled_feature_flag, described_class.gitlab_instance)
- it 'is false for any other actor' do
- expect(described_class.enabled?(key, create(:user))).to be_falsey
+ expect(described_class.enabled?(:enabled_feature_flag, described_class.gitlab_instance)).to be_truthy
+ end
end
- end
-
- context 'with an individual actor' do
- let(:actor) { stub_feature_flag_gate('CustomActor:5') }
- let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- before do
- described_class.enable(:enabled_feature_flag, actor)
- end
+ context 'with :instance actor' do
+ it 'always returns the same gate value' do
+ described_class.enable(:enabled_feature_flag, :instance)
- it 'returns true when same actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy
+ expect(described_class.enabled?(:enabled_feature_flag, :instance)).to be_truthy
+ end
end
- it 'returns false when different actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey
- end
+ context 'with a group member' do
+ let(:key) { :awesome_feature }
+ let(:guinea_pigs) { create_list(:user, 3) }
- it 'returns false when no actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey
- end
- end
+ before do
+ described_class.reset
+ stub_feature_flag_definition(key)
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
+ described_class.enable(key, described_class.group(:guinea_pigs))
+ end
- context 'with invalid actor' do
- let(:actor) { double('invalid actor') }
+ it 'is true for all group members' do
+ expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy
+ expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy
+ expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy
+ end
- context 'when is dev_or_test_env' do
- it 'does raise exception' do
- expect { described_class.enabled?(:enabled_feature_flag, actor) }
- .to raise_error /needs to include `FeatureGate` or implement `flipper_id`/
+ it 'is false for any other actor' do
+ expect(described_class.enabled?(key, create(:user))).to be_falsey
end
end
- end
- context 'validates usage of feature flag with YAML definition' do
- let(:definition) do
- Feature::Definition.new('development/my_feature_flag.yml',
- name: 'my_feature_flag',
- type: 'development',
- default_enabled: default_enabled
- ).tap(&:validate!)
- end
+ context 'with an individual actor' do
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- let(:default_enabled) { false }
+ before do
+ described_class.enable(:enabled_feature_flag, actor)
+ end
- before do
- stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+ it 'returns true when same actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy
+ end
- allow(Feature::Definition).to receive(:valid_usage!).and_call_original
- allow(Feature::Definition).to receive(:definitions) do
- { definition.key => definition }
+ it 'returns false when different actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey
end
- end
- it 'when usage is correct' do
- expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error
+ it 'returns false when no actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey
+ end
end
- it 'when invalid type is used' do
- expect { described_class.enabled?(:my_feature_flag, type: :ops) }
- .to raise_error(/The `type:` of/)
- end
+ context 'with invalid actor' do
+ let(:actor) { double('invalid actor') }
- context 'when default_enabled: is false in the YAML definition' do
- it 'reads the default from the YAML definition' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled)
+ context 'when is dev_or_test_env' do
+ it 'does raise exception' do
+ expect { described_class.enabled?(:enabled_feature_flag, actor) }
+ .to raise_error /needs to include `FeatureGate` or implement `flipper_id`/
+ end
end
end
- context 'when default_enabled: is true in the YAML definition' do
- let(:default_enabled) { true }
-
- it 'reads the default from the YAML definition' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(true)
+ context 'validates usage of feature flag with YAML definition' do
+ let(:definition) do
+ Feature::Definition.new('development/my_feature_flag.yml',
+ name: 'my_feature_flag',
+ type: 'development',
+ default_enabled: default_enabled
+ ).tap(&:validate!)
end
- context 'and feature has been disabled' do
- before do
- described_class.disable(:my_feature_flag)
- end
+ let(:default_enabled) { false }
- it 'is not enabled' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(false)
+ before do
+ stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+ lb_ff_definition = Feature::Definition.get(load_balancer_test_feature_flag)
+ allow(Feature::Definition).to receive(:valid_usage!).and_call_original
+ allow(Feature::Definition).to receive(:definitions) do
+ { definition.key => definition, lb_ff_definition.key => lb_ff_definition }
end
end
- context 'with a cached value and the YAML definition is changed thereafter' do
- before do
- described_class.enabled?(:my_feature_flag)
+ it 'when usage is correct' do
+ expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error
+ end
+
+ it 'when invalid type is used' do
+ expect { described_class.enabled?(:my_feature_flag, type: :ops) }
+ .to raise_error(/The `type:` of/)
+ end
+
+ context 'when default_enabled: is false in the YAML definition' do
+ it 'reads the default from the YAML definition' do
+ expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled)
end
+ end
- it 'reads new default value' do
- allow(definition).to receive(:default_enabled).and_return(true)
+ context 'when default_enabled: is true in the YAML definition' do
+ let(:default_enabled) { true }
+ it 'reads the default from the YAML definition' do
expect(described_class.enabled?(:my_feature_flag)).to eq(true)
end
- end
- context 'when YAML definition does not exist for an optional type' do
- let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first }
+ context 'and feature has been disabled' do
+ before do
+ described_class.disable(:my_feature_flag)
+ end
- context 'when in dev or test environment' do
- it 'raises an error for dev' do
- expect { described_class.enabled?(:non_existent_flag, type: optional_type) }
- .to raise_error(
- Feature::InvalidFeatureFlagError,
- "The feature flag YAML definition for 'non_existent_flag' does not exist")
+ it 'is not enabled' do
+ expect(described_class.enabled?(:my_feature_flag)).to eq(false)
end
end
- context 'when in production' do
+ context 'with a cached value and the YAML definition is changed thereafter' do
before do
- allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ described_class.enabled?(:my_feature_flag)
end
- context 'when database exists' do
- before do
- allow(ApplicationRecord.database).to receive(:exists?).and_return(true)
- end
+ it 'reads new default value' do
+ allow(definition).to receive(:default_enabled).and_return(true)
- it 'checks the persisted status and returns false' do
- expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original
+ expect(described_class.enabled?(:my_feature_flag)).to eq(true)
+ end
+ end
+
+ context 'when YAML definition does not exist for an optional type' do
+ let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first }
- expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ context 'when in dev or test environment' do
+ it 'raises an error for dev' do
+ expect { described_class.enabled?(:non_existent_flag, type: optional_type) }
+ .to raise_error(
+ Feature::InvalidFeatureFlagError,
+ "The feature flag YAML definition for 'non_existent_flag' does not exist")
end
end
- context 'when database does not exist' do
+ context 'when in production' do
before do
- allow(ApplicationRecord.database).to receive(:exists?).and_return(false)
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ end
+
+ context 'when database exists' do
+ before do
+ allow(ApplicationRecord.database).to receive(:exists?).and_return(true)
+ end
+
+ it 'checks the persisted status and returns false' do
+ expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original
+
+ expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ end
end
- it 'returns false without checking the status in the database' do
- expect(described_class).not_to receive(:get)
+ context 'when database does not exist' do
+ before do
+ allow(ApplicationRecord.database).to receive(:exists?).and_return(false)
+ end
+
+ it 'returns false without checking the status in the database' do
+ expect(described_class).not_to receive(:get)
- expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ end
end
end
end
end
end
end
- end
-
- describe '.disable?' do
- it 'returns true (and tracks / raises exception for dev) for undefined feature' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
-
- expect(described_class.disabled?(:some_random_feature_flag)).to be_truthy
- end
- it 'returns true for undefined feature with default_enabled_if_undefined: false' do
- expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_truthy
- end
-
- it 'returns false for undefined feature with default_enabled_if_undefined: true' do
- expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_falsey
- end
+ describe '.disable?' do
+ it 'returns true (and tracks / raises exception for dev) for undefined feature' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- it 'returns true for existing disabled feature in the database' do
- stub_feature_flag_definition(:disabled_feature_flag)
- described_class.disable(:disabled_feature_flag)
+ expect(described_class.disabled?(:some_random_feature_flag)).to be_truthy
+ end
- expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy
- end
+ it 'returns true for undefined feature with default_enabled_if_undefined: false' do
+ expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_truthy
+ end
- it 'returns false for existing enabled feature in the database' do
- stub_feature_flag_definition(:enabled_feature_flag)
- described_class.enable(:enabled_feature_flag)
+ it 'returns false for undefined feature with default_enabled_if_undefined: true' do
+ expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_falsey
+ end
- expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey
- end
- end
+ it 'returns true for existing disabled feature in the database' do
+ stub_feature_flag_definition(:disabled_feature_flag)
+ described_class.disable(:disabled_feature_flag)
- shared_examples_for 'logging' do
- let(:expected_action) {}
- let(:expected_extra) {}
+ expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy
+ end
- it 'logs the event' do
- expect(Feature.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra)
+ it 'returns false for existing enabled feature in the database' do
+ stub_feature_flag_definition(:enabled_feature_flag)
+ described_class.enable(:enabled_feature_flag)
- subject
+ expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey
+ end
end
- end
- describe '.enable' do
- subject { described_class.enable(key, thing) }
+ shared_examples_for 'logging' do
+ let(:expected_action) {}
+ let(:expected_extra) {}
- let(:key) { :awesome_feature }
- let(:thing) { true }
+ it 'logs the event' do
+ expect(Feature.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra)
- it_behaves_like 'logging' do
- let(:expected_action) { :enable }
- let(:expected_extra) { { "extra.thing" => "true" } }
+ subject
+ end
end
- # This is documented to return true, modify doc/administration/feature_flags.md if it changes
- it 'returns true' do
- expect(subject).to be true
- end
+ describe '.enable' do
+ subject { described_class.enable(key, thing) }
- context 'when thing is an actor' do
- let(:thing) { create(:user) }
+ let(:key) { :awesome_feature }
+ let(:thing) { true }
it_behaves_like 'logging' do
- let(:expected_action) { eq(:enable) | eq(:remove_opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ let(:expected_action) { :enable }
+ let(:expected_extra) { { "extra.thing" => "true" } }
end
- end
- end
- describe '.disable' do
- subject { described_class.disable(key, thing) }
+ # This is documented to return true, modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
+ end
- let(:key) { :awesome_feature }
- let(:thing) { false }
+ context 'when thing is an actor' do
+ let(:thing) { create(:user) }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable }
- let(:expected_extra) { { "extra.thing" => "false" } }
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:enable) | eq(:remove_opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
+ end
end
- # This is documented to return true, modify doc/administration/feature_flags.md if it changes
- it 'returns true' do
- expect(subject).to be true
- end
+ describe '.disable' do
+ subject { described_class.disable(key, thing) }
- context 'when thing is an actor' do
- let(:thing) { create(:user) }
- let(:flag_opts) { {} }
+ let(:key) { :awesome_feature }
+ let(:thing) { false }
it_behaves_like 'logging' do
let(:expected_action) { :disable }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ let(:expected_extra) { { "extra.thing" => "false" } }
end
- before do
- stub_feature_flag_definition(key, flag_opts)
+ # This is documented to return true, modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
end
- context 'when the feature flag was enabled for this actor' do
- before do
- described_class.enable(key, thing)
- end
+ context 'when thing is an actor' do
+ let(:thing) { create(:user) }
+ let(:flag_opts) { {} }
- it 'marks this thing as disabled' do
- expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(false)
+ before do
+ stub_feature_flag_definition(key, flag_opts)
end
- it 'is possible to re-enable the feature' do
- subject
+ context 'when the feature flag was enabled for this actor' do
+ before do
+ described_class.enable(key, thing)
+ end
- expect { described_class.enable(key, thing) }
- .to change { thing_enabled? }.from(false).to(true)
- end
- end
+ it 'marks this thing as disabled' do
+ expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ end
- context 'when the feature flag is enabled globally' do
- before do
- described_class.enable(key)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(false)
+ end
+
+ it 'is possible to re-enable the feature' do
+ subject
- it 'does not mark this thing as disabled' do
- expect { subject }.not_to change { thing_enabled? }.from(true)
+ expect { described_class.enable(key, thing) }
+ .to change { thing_enabled? }.from(false).to(true)
+ end
end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ context 'when the feature flag is enabled globally' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'does not mark this thing as disabled' do
+ expect { subject }.not_to change { thing_enabled? }.from(true)
+ end
+
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
end
end
end
- end
- describe 'opt_out' do
- subject { described_class.opt_out(key, thing) }
+ describe 'opt_out' do
+ subject { described_class.opt_out(key, thing) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- before do
- stub_feature_flag_definition(key)
- described_class.enable(key)
- end
+ before do
+ stub_feature_flag_definition(key)
+ described_class.enable(key)
+ end
- context 'when thing is an actor' do
- let_it_be(:thing) { create(:project) }
+ context 'when thing is an actor' do
+ let_it_be(:thing) { create(:project) }
- it 'marks this thing as disabled' do
- expect { subject }.to change { thing_enabled? }.from(true).to(false)
- end
+ it 'marks this thing as disabled' do
+ expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
- it_behaves_like 'logging' do
- let(:expected_action) { eq(:opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
- end
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
- it 'stores the opt-out information as a gate' do
- subject
+ it 'stores the opt-out information as a gate' do
+ subject
- flag = described_class.get(key)
+ flag = described_class.get(key)
- expect(flag.actors_value).to include(include(thing.flipper_id))
- expect(flag.actors_value).not_to include(thing.flipper_id)
+ expect(flag.actors_value).to include(include(thing.flipper_id))
+ expect(flag.actors_value).not_to include(thing.flipper_id)
+ end
end
- end
- context 'when thing is a group' do
- let(:thing) { Feature.group(:guinea_pigs) }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when thing is a group' do
+ let(:thing) { Feature.group(:guinea_pigs) }
+ let(:guinea_pigs) { create_list(:user, 3) }
- before do
- Feature.reset
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ before do
+ Feature.reset
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
end
- end
- it 'has no effect' do
- expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ it 'has no effect' do
+ expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ end
end
end
- end
- describe 'remove_opt_out' do
- subject { described_class.remove_opt_out(key, thing) }
+ describe 'remove_opt_out' do
+ subject { described_class.remove_opt_out(key, thing) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- before do
- stub_feature_flag_definition(key)
- described_class.enable(key)
- described_class.opt_out(key, thing)
- end
+ before do
+ stub_feature_flag_definition(key)
+ described_class.enable(key)
+ described_class.opt_out(key, thing)
+ end
- context 'when thing is an actor' do
- let_it_be(:thing) { create(:project) }
+ context 'when thing is an actor' do
+ let_it_be(:thing) { create(:project) }
- it 're-enables this thing' do
- expect { subject }.to change { thing_enabled? }.from(false).to(true)
- end
+ it 're-enables this thing' do
+ expect { subject }.to change { thing_enabled? }.from(false).to(true)
+ end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
- it_behaves_like 'logging' do
- let(:expected_action) { eq(:remove_opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
- end
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:remove_opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
- it 'removes the opt-out information' do
- subject
+ it 'removes the opt-out information' do
+ subject
- flag = described_class.get(key)
+ flag = described_class.get(key)
- expect(flag.actors_value).to be_empty
+ expect(flag.actors_value).to be_empty
+ end
end
- end
- context 'when thing is a group' do
- let(:thing) { Feature.group(:guinea_pigs) }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when thing is a group' do
+ let(:thing) { Feature.group(:guinea_pigs) }
+ let(:guinea_pigs) { create_list(:user, 3) }
- before do
- Feature.reset
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ before do
+ Feature.reset
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
end
- end
- it 'has no effect' do
- expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ it 'has no effect' do
+ expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ end
end
end
- end
- describe '.enable_percentage_of_time' do
- subject { described_class.enable_percentage_of_time(key, percentage) }
+ describe '.enable_percentage_of_time' do
+ subject { described_class.enable_percentage_of_time(key, percentage) }
- let(:key) { :awesome_feature }
- let(:percentage) { 50 }
-
- it_behaves_like 'logging' do
- let(:expected_action) { :enable_percentage_of_time }
- let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
- end
+ let(:key) { :awesome_feature }
+ let(:percentage) { 50 }
- context 'when the flag is on' do
- before do
- described_class.enable(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :enable_percentage_of_time }
+ let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
end
- it 'fails with InvalidOperation' do
- expect { subject }.to raise_error(described_class::InvalidOperation)
+ context 'when the flag is on' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'fails with InvalidOperation' do
+ expect { subject }.to raise_error(described_class::InvalidOperation)
+ end
end
end
- end
- describe '.disable_percentage_of_time' do
- subject { described_class.disable_percentage_of_time(key) }
+ describe '.disable_percentage_of_time' do
+ subject { described_class.disable_percentage_of_time(key) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable_percentage_of_time }
- let(:expected_extra) { {} }
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable_percentage_of_time }
+ let(:expected_extra) { {} }
+ end
end
- end
-
- describe '.enable_percentage_of_actors' do
- subject { described_class.enable_percentage_of_actors(key, percentage) }
- let(:key) { :awesome_feature }
- let(:percentage) { 50 }
+ describe '.enable_percentage_of_actors' do
+ subject { described_class.enable_percentage_of_actors(key, percentage) }
- it_behaves_like 'logging' do
- let(:expected_action) { :enable_percentage_of_actors }
- let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
- end
+ let(:key) { :awesome_feature }
+ let(:percentage) { 50 }
- context 'when the flag is on' do
- before do
- described_class.enable(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :enable_percentage_of_actors }
+ let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
end
- it 'fails with InvalidOperation' do
- expect { subject }.to raise_error(described_class::InvalidOperation)
+ context 'when the flag is on' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'fails with InvalidOperation' do
+ expect { subject }.to raise_error(described_class::InvalidOperation)
+ end
end
end
- end
- describe '.disable_percentage_of_actors' do
- subject { described_class.disable_percentage_of_actors(key) }
+ describe '.disable_percentage_of_actors' do
+ subject { described_class.disable_percentage_of_actors(key) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable_percentage_of_actors }
- let(:expected_extra) { {} }
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable_percentage_of_actors }
+ let(:expected_extra) { {} }
+ end
end
- end
- describe '.remove' do
- subject { described_class.remove(key) }
-
- let(:key) { :awesome_feature }
- let(:actor) { create(:user) }
-
- before do
- described_class.enable(key)
- end
+ describe '.remove' do
+ subject { described_class.remove(key) }
- it_behaves_like 'logging' do
- let(:expected_action) { :remove }
- let(:expected_extra) { {} }
- end
+ let(:key) { :awesome_feature }
+ let(:actor) { create(:user) }
- context 'for a non-persisted feature' do
- it 'returns nil' do
- expect(described_class.remove(:non_persisted_feature_flag)).to be_nil
+ before do
+ described_class.enable(key)
end
- it 'returns true, and cleans up' do
- expect(subject).to be_truthy
- expect(described_class.persisted_names).not_to include(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :remove }
+ let(:expected_extra) { {} }
end
- end
- end
-
- describe '.log_feature_flag_states?' do
- let(:log_state_changes) { false }
- let(:milestone) { "0.0" }
- let(:flag_name) { :some_flag }
- let(:flag_type) { 'development' }
- before do
- Feature.enable(:feature_flag_state_logs)
- Feature.enable(:some_flag)
-
- allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
- allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original
- allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original
+ context 'for a non-persisted feature' do
+ it 'returns nil' do
+ expect(described_class.remove(:non_persisted_feature_flag)).to be_nil
+ end
- stub_feature_flag_definition(flag_name,
- type: flag_type,
- milestone: milestone,
- log_state_changes: log_state_changes)
+ it 'returns true, and cleans up' do
+ expect(subject).to be_truthy
+ expect(described_class.persisted_names).not_to include(key)
+ end
+ end
end
- subject { described_class.log_feature_flag_states?(flag_name) }
+ describe '.log_feature_flag_states?' do
+ let(:log_state_changes) { false }
+ let(:milestone) { "0.0" }
+ let(:flag_name) { :some_flag }
+ let(:flag_type) { 'development' }
- context 'when flag is feature_flag_state_logs' do
- let(:milestone) { "14.6" }
- let(:flag_name) { :feature_flag_state_logs }
- let(:flag_type) { 'ops' }
- let(:log_state_changes) { true }
+ before do
+ Feature.enable(:feature_flag_state_logs)
+ Feature.enable(:some_flag)
- it { is_expected.to be_falsey }
- end
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original
+ allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original
- context 'when flag is old' do
- it { is_expected.to be_falsey }
- end
+ stub_feature_flag_definition(flag_name,
+ type: flag_type,
+ milestone: milestone,
+ log_state_changes: log_state_changes)
+ end
- context 'when flag is old while log_state_changes is not present ' do
- let(:log_state_changes) { nil }
+ subject { described_class.log_feature_flag_states?(flag_name) }
- it { is_expected.to be_falsey }
- end
+ context 'when flag is feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
+ let(:flag_name) { :feature_flag_state_logs }
+ let(:flag_type) { 'ops' }
+ let(:log_state_changes) { true }
- context 'when flag is old but log_state_changes is true' do
- let(:log_state_changes) { true }
+ it { is_expected.to be_falsey }
+ end
- it { is_expected.to be_truthy }
- end
+ context 'when flag is old' do
+ it { is_expected.to be_falsey }
+ end
- context 'when flag is new and not feature_flag_state_logs' do
- let(:milestone) { "14.6" }
+ context 'when flag is old while log_state_changes is not present ' do
+ let(:log_state_changes) { nil }
- before do
- stub_version('14.5.123', 'deadbeef')
+ it { is_expected.to be_falsey }
end
- it { is_expected.to be_truthy }
- end
+ context 'when flag is old but log_state_changes is true' do
+ let(:log_state_changes) { true }
- context 'when milestone is nil' do
- let(:milestone) { nil }
+ it { is_expected.to be_truthy }
+ end
- it { is_expected.to be_falsey }
- end
- end
+ context 'when flag is new and not feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
- context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
- let(:actor) { stub_feature_flag_gate('CustomActor:5') }
- let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
+ before do
+ stub_version('14.5.123', 'deadbeef')
+ end
- # This is a bit unpleasant. For these tests we want to simulate stale reads
- # from the database (due to database load balancing). A simple way to do
- # that is to stub the response on the adapter Flipper uses for reading from
- # the database. However, there isn't a convenient API for this. We know that
- # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can
- # find it that way.
- let(:active_record_adapter) do
- adapter = described_class.flipper
+ it { is_expected.to be_truthy }
+ end
- loop do
- break adapter unless adapter.instance_variable_get(:@adapter)
+ context 'when milestone is nil' do
+ let(:milestone) { nil }
- adapter = adapter.instance_variable_get(:@adapter)
+ it { is_expected.to be_falsey }
end
end
- before do
- stub_feature_flag_definition(:enabled_feature_flag)
- end
+ context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- it 'gives the correct value when enabling for an additional actor' do
- described_class.enable(:enabled_feature_flag, actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ # This is a bit unpleasant. For these tests we want to simulate stale reads
+ # from the database (due to database load balancing). A simple way to do
+ # that is to stub the response on the adapter Flipper uses for reading from
+ # the database. However, there isn't a convenient API for this. We know that
+ # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can
+ # find it that way.
+ let(:active_record_adapter) do
+ adapter = described_class.flipper
- # This should only be enabled for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ loop do
+ break adapter unless adapter.instance_variable_get(:@adapter)
- # Enable for `another_actor` and simulate a stale read
- described_class.enable(:enabled_feature_flag, another_actor)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ adapter = adapter.instance_variable_get(:@adapter)
+ end
+ end
- # Should read from the cache and be enabled for both of these actors
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
+ before do
+ stub_feature_flag_definition(:enabled_feature_flag)
+ end
- it 'gives the correct value when enabling for percentage of time' do
- described_class.enable_percentage_of_time(:enabled_feature_flag, 10)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when enabling for an additional actor' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # Test against `gate_values` directly as otherwise it would be non-determistic
- expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10)
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- # Enable 50% of time and simulate a stale read
- described_class.enable_percentage_of_time(:enabled_feature_flag, 50)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Enable for `another_actor` and simulate a stale read
+ described_class.enable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be enabled 50% of the time
- expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50)
- end
+ # Should read from the cache and be enabled for both of these actors
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
- it 'gives the correct value when disabling the flag' do
- described_class.enable(:enabled_feature_flag, actor)
- described_class.enable(:enabled_feature_flag, another_actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when enabling for percentage of time' do
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 10)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # This be enabled for `actor` and `another_actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ # Test against `gate_values` directly as otherwise it would be non-deterministic
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10)
- # Disable for `another_actor` and simulate a stale read
- described_class.disable(:enabled_feature_flag, another_actor)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Enable 50% of time and simulate a stale read
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 50)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be enabled only for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
+ # Should read from the cache and be enabled 50% of the time
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50)
+ end
- it 'gives the correct value when deleting the flag' do
- described_class.enable(:enabled_feature_flag, actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when disabling the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ described_class.enable(:enabled_feature_flag, another_actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # This should only be enabled for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ # This should be enabled for `actor` and `another_actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- # Remove and simulate a stale read
- described_class.remove(:enabled_feature_flag)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Disable for `another_actor` and simulate a stale read
+ described_class.disable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be disabled everywhere
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
- end
+ # Should read from the cache and be enabled only for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
+
+ it 'gives the correct value when deleting the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- describe Feature::Target do
- describe '#targets' do
- let(:project) { create(:project) }
- let(:group) { create(:group) }
- let(:user_name) { project.first_owner.username }
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- subject do
- described_class.new(
- user: user_name,
- project: project.full_path,
- group: group.full_path,
- repository: project.repository.full_path
- )
- end
+ # Remove and simulate a stale read
+ described_class.remove(:enabled_feature_flag)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- it 'returns all found targets' do
- expect(subject.targets).to be_an(Array)
- expect(subject.targets).to eq([project.first_owner, project, group, project.repository])
+ # Should read from the cache and be disabled everywhere
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
end
+ end
- context 'when repository target works with different types of repositories' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :wiki_repo, group: group) }
- let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) }
- let(:personal_snippet) { create(:personal_snippet) }
- let(:project_snippet) { create(:project_snippet, project: project) }
-
- let(:targets) do
- [
- project,
- project.wiki,
- project_in_user_namespace,
- personal_snippet,
- project_snippet
- ]
- end
+ describe Feature::Target do
+ describe '#targets' do
+ let(:project) { create(:project) }
+ let(:group) { create(:group) }
+ let(:user_name) { project.first_owner.username }
subject do
described_class.new(
- repository: targets.map { |t| t.repository.full_path }.join(",")
+ user: user_name,
+ project: project.full_path,
+ group: group.full_path,
+ repository: project.repository.full_path
)
end
it 'returns all found targets' do
expect(subject.targets).to be_an(Array)
- expect(subject.targets).to eq(targets.map(&:repository))
+ expect(subject.targets).to eq([project.first_owner, project, group, project.repository])
+ end
+
+ context 'when repository target works with different types of repositories' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :wiki_repo, group: group) }
+ let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) }
+ let(:personal_snippet) { create(:personal_snippet) }
+ let(:project_snippet) { create(:project_snippet, project: project) }
+
+ let(:targets) do
+ [
+ project,
+ project.wiki,
+ project_in_user_namespace,
+ personal_snippet,
+ project_snippet
+ ]
+ end
+
+ subject do
+ described_class.new(
+ repository: targets.map { |t| t.repository.full_path }.join(",")
+ )
+ end
+
+ it 'returns all found targets' do
+ expect(subject.targets).to be_an(Array)
+ expect(subject.targets).to eq(targets.map(&:repository))
+ end
end
end
end
diff --git a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
index 3b166bd4c4c..240472585bb 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_dictionary_matcher.txt
@@ -3,8 +3,8 @@ migration_job_name: MyBatchedMigration
description: # Please capture what MyBatchedMigration does
feature_category: database
introduced_by_url: # URL of the MR \(or issue/commit\) that introduced the migration
-milestone: [0-9\.]+
+milestone: '[0-9\.]+'
queued_migration_version: [0-9]+
# Replace with the approximate date you think it's best to ensure the completion of this BBM.
finalize_after: # yyyy-mm-dd
-finalized_by: # version of the migration that ensured this bbm
+finalized_by: # version of the migration that finalized this BBM
diff --git a/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt b/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
index 36f7885b591..d1fab7cf4bd 100644
--- a/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
+++ b/spec/lib/generators/batched_background_migration/expected_files/queue_my_batched_migration.txt
@@ -19,7 +19,6 @@ class QueueMyBatchedMigration < Gitlab::Database::Migration[2.2]
:projects,
:id,
job_interval: DELAY_INTERVAL,
- queued_migration_version: '<migration_version>',
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
diff --git a/spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb b/spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb
new file mode 100644
index 00000000000..77cc3904560
--- /dev/null
+++ b/spec/lib/generators/gitlab/analytics/group_fetcher_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Analytics::GroupFetcher, :silence_stdout, feature_category: :service_ping do
+ let(:stage_data) do
+ <<~YAML
+ stages:
+ analyze:
+ section: analytics
+ groups:
+ analytics_instrumentation:
+ secure:
+ section: security
+ groups:
+ static_analysis:
+ dynamic_analysis:
+ YAML
+ end
+
+ let(:response) { instance_double(HTTParty::Response, success?: true, body: stage_data) }
+
+ around do |example|
+ described_class.instance_variable_set(:@groups, nil)
+ example.run
+ described_class.instance_variable_set(:@groups, nil)
+ end
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_return(response)
+ end
+
+ context 'when online' do
+ describe '.group_unknown?' do
+ it 'returns false for known groups' do
+ expect(described_class.group_unknown?('analytics_instrumentation')).to be_falsy
+ end
+
+ it 'returns true for unknown groups' do
+ expect(described_class.group_unknown?('unknown')).to be_truthy
+ end
+ end
+
+ describe '.stage_text' do
+ it 'returns the stage name for known groups' do
+ expect(described_class.stage_text('analytics_instrumentation')).to eq('analyze')
+ end
+
+ it 'returns empty string for unknown group' do
+ expect(described_class.stage_text('unknown')).to eq('')
+ end
+ end
+
+ describe '.section_text' do
+ it 'returns the section name for known groups' do
+ expect(described_class.section_text('analytics_instrumentation')).to eq('analytics')
+ end
+
+ it 'returns empty string for unknown group' do
+ expect(described_class.section_text('unknown')).to eq('')
+ end
+ end
+ end
+
+ context 'when offline' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_raise(Gitlab::HTTP_V2::BlockedUrlError)
+ end
+
+ describe '.group_unknown?' do
+ it 'returns false for known groups' do
+ expect(described_class.group_unknown?('analytics_instrumentation')).to be_falsy
+ end
+
+ it 'returns false for unknown group' do
+ expect(described_class.group_unknown?('unknown')).to be_falsy
+ end
+ end
+
+ describe '.stage_text' do
+ it 'returns empty string for known groups' do
+ expect(described_class.stage_text('analytics_instrumentation')).to eq('')
+ end
+
+ it 'returns empty string for unknown groups' do
+ expect(described_class.stage_text('unknown')).to eq('')
+ end
+ end
+
+ describe '.section_text' do
+ it 'returns empty string for known groups' do
+ expect(described_class.section_text('analytics_instrumentation')).to eq('')
+ end
+
+ it 'returns empty string for unknown groups' do
+ expect(described_class.section_text('unknown')).to eq('')
+ end
+ end
+ end
+end
diff --git a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
index c52d17d4a5b..2d9356ca96d 100644
--- a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
+++ b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb
@@ -10,81 +10,27 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
let(:tmpfile) { Tempfile.new('test-metadata') }
let(:existing_key_paths) { {} }
let(:description) { "This metric counts unique users viewing analytics metrics dashboard section" }
- let(:group) { "group::analytics instrumentation" }
- let(:stage) { "analytics" }
+ let(:group) { "analytics_instrumentation" }
+ let(:stage) { "analyze" }
let(:section) { "analytics" }
let(:mr) { "https://gitlab.com/some-group/some-project/-/merge_requests/123" }
let(:event) { "view_analytics_dashboard" }
let(:unique) { "user.id" }
let(:time_frames) { %w[7d] }
+ let(:group_unknown) { false }
let(:include_default_identifiers) { 'yes' }
- let(:options) do
+ let(:base_options) do
{
time_frames: time_frames,
free: true,
mr: mr,
group: group,
- stage: stage,
- section: section,
event: event,
unique: unique
}.stringify_keys
end
- let(:key_path_without_time_frame) { "count_distinct_#{unique.sub('.', '_')}_from_#{event}" }
- let(:key_path_7d) { "#{key_path_without_time_frame}_7d" }
- let(:metric_definition_path_7d) { Dir.glob(File.join(temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first }
- let(:metric_definition_7d) do
- {
- "key_path" => key_path_7d,
- "description" => description,
- "product_section" => section,
- "product_stage" => stage,
- "product_group" => group,
- "performance_indicator_type" => [],
- "value_type" => "number",
- "status" => "active",
- "milestone" => "13.9",
- "introduced_by_url" => mr,
- "time_frame" => "7d",
- "data_source" => "internal_events",
- "data_category" => "optional",
- "instrumentation_class" => "RedisHLLMetric",
- "distribution" => %w[ce ee],
- "tier" => %w[free premium ultimate],
- "options" => {
- "events" => [event]
- },
- "events" => [{ "name" => event, "unique" => unique }]
- }
- end
-
- let(:key_path_all) { "count_total_#{event}" }
- let(:metric_definition_path_all) { Dir.glob(File.join(temp_dir, "metrics/counts_all/#{key_path_all}.yml")).first }
- let(:metric_definition_all) do
- {
- "key_path" => key_path_all,
- "description" => description,
- "product_section" => section,
- "product_stage" => stage,
- "product_group" => group,
- "performance_indicator_type" => [],
- "value_type" => "number",
- "status" => "active",
- "milestone" => "13.9",
- "introduced_by_url" => mr,
- "time_frame" => "all",
- "data_source" => "internal_events",
- "data_category" => "optional",
- "instrumentation_class" => "TotalCountMetric",
- "distribution" => %w[ce ee],
- "tier" => %w[free premium ultimate],
- "options" => {
- "events" => [event]
- },
- "events" => [{ "name" => event }]
- }
- end
+ let(:options) { base_options }
before do
stub_const("#{described_class}::TOP_LEVEL_DIR_EE", ee_temp_dir)
@@ -98,6 +44,10 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
.and_return(description)
end
+ allow(Gitlab::Analytics::GroupFetcher).to receive(:group_unknown?).and_return(group_unknown)
+ allow(Gitlab::Analytics::GroupFetcher).to receive(:stage_text).with(group).and_return(stage)
+ allow(Gitlab::Analytics::GroupFetcher).to receive(:section_text).with(group).and_return(section)
+
allow(Gitlab::TaskHelpers).to receive(:prompt).and_return(include_default_identifiers)
allow(Gitlab::Usage::MetricDefinition).to receive(:definitions).and_return(existing_key_paths)
end
@@ -189,35 +139,85 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
describe 'Creating metric definition file' do
- context 'for single time frame' do
- let(:time_frames) { %w[7d] }
+ let(:metric_dir) { temp_dir }
+ let(:base_key_path_unique) { "count_distinct_#{unique.sub('.', '_')}_from_#{event}" }
+ let(:base_key_path_total) { "count_total_#{event}" }
+ let(:base_metric_definition) do
+ {
+ "description" => description,
+ "product_section" => section,
+ "product_stage" => stage,
+ "product_group" => group,
+ "performance_indicator_type" => [],
+ "value_type" => "number",
+ "status" => "active",
+ "milestone" => "13.9",
+ "introduced_by_url" => mr,
+ "data_source" => "internal_events",
+ "data_category" => "optional",
+ "distribution" => %w[ce ee],
+ "tier" => %w[free premium ultimate],
+ "options" => {
+ "events" => [event]
+ }
+ }
+ end
+
+ let(:metric_definition_extra) { {} }
- it 'creates a metric definition file' do
+ shared_examples 'creates unique metric definitions' do |time_frames|
+ it 'creates a metric definition for each of the time frames' do
described_class.new([], options).invoke_all
- expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
+ time_frames.each do |time_frame|
+ key_path = "#{base_key_path_unique}_#{time_frame}"
+ metric_definition_path = Dir.glob(File.join(metric_dir, "metrics/counts_#{time_frame}/#{key_path}.yml")).first
+ metric_definition = base_metric_definition.merge(
+ "key_path" => key_path,
+ "time_frame" => time_frame,
+ "events" => [{ "name" => event, "unique" => unique }]
+ ).merge(metric_definition_extra)
+ expect(YAML.safe_load(File.read(metric_definition_path))).to eq(metric_definition)
+ end
end
+ end
- context 'with time frame "all"' do
- let(:time_frames) { %w[all] }
+ shared_examples 'creates total metric definitions' do |time_frames|
+ it 'creates a metric definition for each of the time frames' do
+ described_class.new([], options).invoke_all
- it 'creates a total count metric definition file' do
- described_class.new([], options).invoke_all
- expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
+ time_frames.each do |time_frame|
+ key_path = "#{base_key_path_total}_#{time_frame}"
+ metric_definition_path = Dir.glob(File.join(metric_dir, "metrics/counts_#{time_frame}/#{key_path}.yml")).first
+ metric_definition = base_metric_definition.merge(
+ "key_path" => key_path,
+ "time_frame" => time_frame,
+ "events" => [{ "name" => event }]
+ ).merge(metric_definition_extra)
+ expect(YAML.safe_load(File.read(metric_definition_path))).to eq(metric_definition)
end
end
+ end
- context 'for ultimate only feature' do
- let(:metric_definition_path_7d) do
- Dir.glob(File.join(ee_temp_dir, temp_dir, "metrics/counts_7d/#{key_path_7d}.yml")).first
- end
+ context 'for single time frame' do
+ let(:time_frames) { %w[7d] }
- it 'creates a metric definition file' do
- described_class.new([], options.merge(tiers: %w[ultimate])).invoke_all
+ it_behaves_like 'creates unique metric definitions', %w[7d]
- expect(YAML.safe_load(File.read(metric_definition_path_7d)))
- .to eq(metric_definition_7d.merge("tier" => ["ultimate"], "distribution" => ["ee"]))
- end
+ context 'with time frame "all" and no "unique"' do
+ let(:time_frames) { %w[all] }
+
+ let(:options) { base_options.except('unique') }
+
+ it_behaves_like 'creates total metric definitions', %w[all]
+ end
+
+ context 'for ultimate only feature' do
+ let(:metric_dir) { File.join(ee_temp_dir, temp_dir) }
+ let(:options) { base_options.merge(tiers: %w[ultimate]) }
+ let(:metric_definition_extra) { { "tier" => ["ultimate"], "distribution" => ["ee"] } }
+
+ it_behaves_like 'creates unique metric definitions', %w[7d]
end
context 'with invalid time frame' do
@@ -228,7 +228,16 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
end
+ context 'with invalid time frame for unique metrics' do
+ let(:time_frames) { %w[all] }
+
+ it 'raises error' do
+ expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError)
+ end
+ end
+
context 'with duplicated key path' do
+ let(:key_path_7d) { "#{base_key_path_unique}_7d" }
let(:existing_key_paths) { { key_path_7d => true } }
it 'raises error' do
@@ -252,14 +261,14 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
context 'without obligatory parameter' do
it 'raises error', :aggregate_failures do
- %w[unique event mr section stage group].each do |option|
+ %w[event mr group].each do |option|
expect { described_class.new([], options.without(option)).invoke_all }
.to raise_error(RuntimeError)
end
end
end
- context 'with to short description' do
+ context 'with too short description' do
it 'asks again for description' do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:ask)
@@ -281,42 +290,28 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat
end
context 'for multiple time frames' do
- let(:time_frames) { %w[7d 28d all] }
- let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
- let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
- let(:metric_definition_28d) do
- metric_definition_7d.merge(
- "key_path" => key_path_28d,
- "time_frame" => "28d"
- )
- end
+ let(:time_frames) { %w[7d 28d] }
- it 'creates metric definition files' do
- described_class.new([], options).invoke_all
-
- expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
- expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
- expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
- end
+ it_behaves_like 'creates unique metric definitions', %w[7d 28d]
end
context 'with default time frames' do
- let(:time_frames) { nil }
- let(:key_path_28d) { "#{key_path_without_time_frame}_28d" }
- let(:metric_definition_path_28d) { Dir.glob(File.join(temp_dir, "metrics/counts_28d/#{key_path_28d}.yml")).first }
- let(:metric_definition_28d) do
- metric_definition_7d.merge(
- "key_path" => key_path_28d,
- "time_frame" => "28d"
- )
- end
+ let(:options) { base_options.without('time_frames', 'unique') }
- it 'creates metric definition files' do
- described_class.new([], options.without('time_frames')).invoke_all
+ it_behaves_like 'creates total metric definitions', %w[7d 28d all]
- expect(YAML.safe_load(File.read(metric_definition_path_7d))).to eq(metric_definition_7d)
- expect(YAML.safe_load(File.read(metric_definition_path_28d))).to eq(metric_definition_28d)
- expect(YAML.safe_load(File.read(metric_definition_path_all))).to eq(metric_definition_all)
+ context 'with unique' do
+ let(:options) { base_options.without('time_frames') }
+
+ it_behaves_like 'creates unique metric definitions', %w[7d 28d]
+
+ it "doesn't create a total 'all' metric" do
+ described_class.new([], options).invoke_all
+
+ key_path = "#{base_key_path_total}_all"
+
+ expect(Dir.glob(File.join(metric_dir, "metrics/counts_all/#{key_path}.yml")).first).to be_nil
+ end
end
end
end
diff --git a/spec/lib/gitlab/access/branch_protection_spec.rb b/spec/lib/gitlab/access/branch_protection_spec.rb
index e54ff8807b5..1ecb1cdd759 100644
--- a/spec/lib/gitlab/access/branch_protection_spec.rb
+++ b/spec/lib/gitlab/access/branch_protection_spec.rb
@@ -90,9 +90,9 @@ RSpec.describe Gitlab::Access::BranchProtection do
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | true
Gitlab::Access::PROTECTION_DEV_CAN_PUSH | false
- Gitlab::Access::PROTECTION_DEV_CAN_MERGE | true
+ Gitlab::Access::PROTECTION_DEV_CAN_MERGE | false
Gitlab::Access::PROTECTION_FULL | false
- Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | true
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | false
end
with_them { it { is_expected.to eq(result) } }
@@ -117,10 +117,10 @@ RSpec.describe Gitlab::Access::BranchProtection do
where(:level, :result) do
Gitlab::Access::PROTECTION_NONE | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
- Gitlab::Access::PROTECTION_DEV_CAN_PUSH | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
+ Gitlab::Access::PROTECTION_DEV_CAN_PUSH | [{ 'access_level' => Gitlab::Access::MAINTAINER }]
Gitlab::Access::PROTECTION_DEV_CAN_MERGE | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
Gitlab::Access::PROTECTION_FULL | [{ 'access_level' => Gitlab::Access::MAINTAINER }]
- Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | [{ 'access_level' => Gitlab::Access::DEVELOPER }]
+ Gitlab::Access::PROTECTION_DEV_CAN_INITIAL_PUSH | [{ 'access_level' => Gitlab::Access::MAINTAINER }]
end
with_them { it { is_expected.to eq(result) } }
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
index aa0a1b66eef..14831f0e61d 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
@@ -2,19 +2,23 @@
require 'spec_helper'
-RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
- let_it_be(:project) { create(:project) }
+RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher, feature_category: :value_stream_management do
+ let_it_be(:project, refind: true) { create(:project, :public) }
let_it_be(:issue_1) { create(:issue, project: project) }
- let_it_be(:issue_2) { create(:issue, project: project) }
+ let_it_be(:issue_2) { create(:issue, :confidential, project: project) }
let_it_be(:issue_3) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, :unique_branches, source_project: project, target_project: project) }
+
+ let_it_be(:user) { create(:user).tap { |u| project.add_developer(u) } }
+
let_it_be(:stage) { create(:cycle_analytics_stage, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production, namespace: project.reload.project_namespace) }
let_it_be(:stage_event_1) { create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_1.id, start_event_timestamp: 2.years.ago, end_event_timestamp: 1.year.ago) } # duration: 1 year
let_it_be(:stage_event_2) { create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_2.id, start_event_timestamp: 5.years.ago, end_event_timestamp: 2.years.ago) } # duration: 3 years
let_it_be(:stage_event_3) { create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_3.id, start_event_timestamp: 6.years.ago, end_event_timestamp: 3.months.ago) } # duration: 5+ years
- let(:params) { { from: 10.years.ago, to: Date.today } }
+ let(:params) { { from: 10.years.ago, to: Date.today, current_user: user } }
subject(:records_fetcher) do
query_builder = Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder.new(stage: stage, params: params)
@@ -25,7 +29,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
it 'returns issues in the correct order' do
returned_iids = records_fetcher.serialized_records.pluck(:iid).map(&:to_i)
- expect(returned_iids).to eq(expected_issue_ids)
+ expect(returned_iids).to eq(expected_iids)
end
it 'passes a hash with all expected attributes to the serializer' do
@@ -52,7 +56,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
describe '#serialized_records' do
describe 'sorting' do
context 'when sorting by end event DESC' do
- let(:expected_issue_ids) { [issue_3.iid, issue_1.iid, issue_2.iid] }
+ let(:expected_iids) { [issue_3.iid, issue_1.iid, issue_2.iid] }
before do
params[:sort] = :end_event
@@ -76,7 +80,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
context 'when sorting by end event ASC' do
- let(:expected_issue_ids) { [issue_2.iid, issue_1.iid, issue_3.iid] }
+ let(:expected_iids) { [issue_2.iid, issue_1.iid, issue_3.iid] }
before do
params[:sort] = :end_event
@@ -87,7 +91,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
context 'when sorting by duration DESC' do
- let(:expected_issue_ids) { [issue_3.iid, issue_2.iid, issue_1.iid] }
+ let(:expected_iids) { [issue_3.iid, issue_2.iid, issue_1.iid] }
before do
params[:sort] = :duration
@@ -98,7 +102,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
context 'when sorting by duration ASC' do
- let(:expected_issue_ids) { [issue_1.iid, issue_2.iid, issue_3.iid] }
+ let(:expected_iids) { [issue_1.iid, issue_2.iid, issue_3.iid] }
before do
params[:sort] = :duration
@@ -110,7 +114,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
describe 'pagination' do
- let(:expected_issue_ids) { [issue_3.iid] }
+ let(:expected_iids) { [issue_3.iid] }
before do
params[:sort] = :duration
@@ -163,4 +167,66 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
end
end
end
+
+ describe 'respecting visibility rules' do
+ let(:expected_iids) { [issue_3.iid, issue_1.iid] }
+
+ subject(:returned_iids) { records_fetcher.serialized_records.pluck(:iid).map(&:to_i) }
+
+ context 'when current user is guest' do
+ before do
+ params[:current_user] = nil
+ end
+
+ it { is_expected.to eq(expected_iids) }
+ end
+
+ context 'when current user is logged and has no access to the project' do
+ before do
+ params[:current_user] = create(:user)
+ end
+
+ it { is_expected.to eq(expected_iids) }
+ end
+ end
+
+ context 'when querying merge requests' do
+ let_it_be(:mr_stage) { create(:cycle_analytics_stage, start_event_identifier: :merge_request_last_build_started, end_event_identifier: :merge_request_last_build_finished, namespace: project.reload.project_namespace) }
+ let_it_be(:mr_stage_event) { create(:cycle_analytics_merge_request_stage_event, stage_event_hash_id: mr_stage.stage_event_hash_id, project_id: project.id, merge_request_id: merge_request.id, start_event_timestamp: 2.years.ago, end_event_timestamp: 1.year.ago) }
+
+ let(:stage) { mr_stage }
+ let(:expected_iids) { [merge_request.iid] }
+
+ subject(:returned_iids) { records_fetcher.serialized_records.pluck(:iid).map(&:to_i) }
+
+ it { is_expected.to eq(expected_iids) }
+
+ context 'when current user is guest' do
+ before do
+ params[:current_user] = nil
+ end
+
+ it { is_expected.to eq([merge_request.iid]) }
+ end
+
+ context 'when current user is logged and has no access to the project' do
+ before do
+ params[:current_user] = create(:user)
+ end
+
+ it { is_expected.to eq([merge_request.iid]) }
+
+ context 'when MR access level is elevated' do
+ before do
+ project.project_feature.update!(
+ builds_access_level: ProjectFeature::PRIVATE,
+ repository_access_level: ProjectFeature::PRIVATE,
+ merge_requests_access_level: ProjectFeature::PRIVATE
+ )
+ end
+
+ it { is_expected.to eq([]) }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 20c1536b9e6..99f932975d0 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -210,6 +210,14 @@ RSpec.describe Gitlab::ApplicationContext do
expect(result(context)).to include(job_id: job.id, project: project.full_path, pipeline_id: job.pipeline_id)
end
end
+
+ context 'when using bulk import context' do
+ it 'sets expected bulk_import_entity_id value' do
+ context = described_class.new(bulk_import_entity_id: 1)
+
+ expect(result(context)).to include(bulk_import_entity_id: 1)
+ end
+ end
end
describe '#use' do
diff --git a/spec/lib/gitlab/auth/saml/config_spec.rb b/spec/lib/gitlab/auth/saml/config_spec.rb
index 2ecc26f9b96..bb5446e8d6a 100644
--- a/spec/lib/gitlab/auth/saml/config_spec.rb
+++ b/spec/lib/gitlab/auth/saml/config_spec.rb
@@ -19,6 +19,41 @@ RSpec.describe Gitlab::Auth::Saml::Config do
end
end
+ describe '.default_attribute_statements' do
+ it 'includes upstream defaults, nickname and Microsoft values' do
+ expect(described_class.default_attribute_statements).to eq(
+ {
+ nickname: %w[username nickname],
+ name: [
+ 'name',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/name'
+ ],
+ email: [
+ 'email',
+ 'mail',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/emailaddress'
+ ],
+ first_name: [
+ 'first_name',
+ 'firstname',
+ 'firstName',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/givenname'
+ ],
+ last_name: [
+ 'last_name',
+ 'lastname',
+ 'lastName',
+ 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname',
+ 'http://schemas.microsoft.com/ws/2008/06/identity/claims/surname'
+ ]
+ }
+ )
+ end
+ end
+
describe '#external_groups' do
let(:config_1) { described_class.new('saml1') }
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 020089b3880..9974e24ad50 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -45,26 +45,26 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
end
- it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes and ai_features' do
+ it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: false)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
- it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes and ai_features' do
+ it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
- it 'contains for project all resource bot scopes without ai_features' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ it 'contains for project all resource bot scopes' do
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for group all resource bot scopes' do
group = build_stubbed(:group).tap { |g| g.namespace_settings = build_stubbed(:namespace_settings, namespace: g) }
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for unsupported type no scopes' do
@@ -75,34 +75,6 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
end
- describe 'ai_features scope' do
- let(:resource) { nil }
-
- subject { described_class.available_scopes_for(resource) }
-
- context 'when resource is user', 'and user has a group with ai features' do
- let(:resource) { build_stubbed(:user) }
-
- it { is_expected.not_to include(:ai_features) }
- end
-
- context 'when resource is project' do
- let(:resource) { build_stubbed(:project) }
-
- it 'does not include ai_features scope' do
- is_expected.not_to include(:ai_features)
- end
- end
-
- context 'when resource is group' do
- let(:resource) { build_stubbed(:group) }
-
- it 'does not include ai_features scope' do
- is_expected.not_to include(:ai_features)
- end
- end
- end
-
context 'with observability_tracing feature flag' do
context 'when disabled' do
before do
@@ -114,7 +86,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
end
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes without observability scopes' do
@@ -123,7 +95,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
project = build_stubbed(:project, namespace: group)
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
end
@@ -140,17 +112,17 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'contains for group all resource bot scopes including observability scopes' do
- expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes including observability scopes' do
- expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy]
+ expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for other group all resource bot scopes without observability scopes' do
@@ -159,7 +131,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
g.namespace_settings = build_stubbed(:namespace_settings, namespace: g)
end
- expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
it 'contains for other project all resource bot scopes without observability scopes' do
@@ -169,7 +141,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
other_project = build_stubbed(:project, namespace: other_group)
- expect(subject.available_scopes_for(other_project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy]
+ expect(subject.available_scopes_for(other_project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features]
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb b/spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb
new file mode 100644
index 00000000000..d985e7fae61
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_branch_protection_namespace_setting_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillBranchProtectionNamespaceSetting,
+ feature_category: :source_code_management do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespace_settings_table) { table(:namespace_settings) }
+ let(:group_namespace) do
+ namespaces_table.create!(name: 'group_namespace', path: 'path-1', type: 'Group', default_branch_protection: 0)
+ end
+
+ let(:user_namespace) do
+ namespaces_table.create!(name: 'user_namespace', path: 'path-2', type: 'User', default_branch_protection: 1)
+ end
+
+ let(:user_three_namespace) do
+ namespaces_table.create!(name: 'user_three_namespace', path: 'path-3', type: 'User', default_branch_protection: 2)
+ end
+
+ let(:group_four_namespace) do
+ namespaces_table.create!(name: 'group_four_namespace', path: 'path-4', type: 'Group', default_branch_protection: 3)
+ end
+
+ let(:group_five_namespace) do
+ namespaces_table.create!(name: 'group_five_namespace', path: 'path-5', type: 'Group', default_branch_protection: 4)
+ end
+
+ let(:start_id) { group_namespace.id }
+ let(:end_id) { group_five_namespace.id }
+
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: start_id,
+ end_id: end_id,
+ batch_table: :namespace_settings,
+ batch_column: :namespace_id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ before do
+ namespace_settings_table.create!(namespace_id: group_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: user_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: user_three_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: group_four_namespace.id, default_branch_protection_defaults: {})
+ namespace_settings_table.create!(namespace_id: group_five_namespace.id, default_branch_protection_defaults: {})
+ end
+
+ it 'updates default_branch_protection_defaults to a correct value', :aggregate_failures do
+ expect(ActiveRecord::QueryRecorder.new { perform_migration }.count).to eq(16)
+
+ expect(migrated_attribute(group_namespace.id)).to eq({ "allow_force_push" => true,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(user_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 30 }] })
+ expect(migrated_attribute(user_three_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(group_four_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 30 }],
+ "allowed_to_push" => [{ "access_level" => 40 }] })
+ expect(migrated_attribute(group_five_namespace.id)).to eq({ "allow_force_push" => false,
+ "allowed_to_merge" => [{ "access_level" => 40 }],
+ "allowed_to_push" => [{ "access_level" => 40 }],
+ "developer_can_initial_push" => true })
+ end
+
+ def migrated_attribute(namespace_id)
+ namespace_settings_table.find(namespace_id).default_branch_protection_defaults
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
index b3f04055e0a..edf972189b2 100644
--- a/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_imported_issue_search_data_spec.rb
@@ -1,11 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
-require_migration!
RSpec.describe Gitlab::BackgroundMigration::BackfillImportedIssueSearchData,
:migration,
- schema: 20220707075300 do
+ schema: 20221111123146 do
let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let!(:issue_search_data_table) { table(:issue_search_data) }
diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb
index 3c0b7766871..925fb0c9a20 100644
--- a/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_integrations_enable_ssl_verification_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsEnableSslVerification, schema: 20220425121410 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsEnableSslVerification, schema: 20221111123146 do
let(:migration) { described_class.new }
let(:integrations) { described_class::Integration }
diff --git a/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb b/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
index 40a4758ba5f..e948717d693 100644
--- a/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillInternalOnNotes, :migration, schema: 20220920124709 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillInternalOnNotes, :migration, schema: 20211202041233 do
let(:notes_table) { table(:notes) }
let!(:confidential_note) { notes_table.create!(id: 1, confidential: true, internal: false) }
diff --git a/spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb
new file mode 100644
index 00000000000..8679a8fab8a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_merge_request_diffs_project_id_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestDiffsProjectId,
+ feature_category: :code_review_workflow,
+ schema: 20231114034017 do # schema before we introduced the invalid not-null constraint
+ let!(:tags_without_project_id) do
+ 13.times do
+ namespace = table(:namespaces).create!(name: 'my namespace', path: 'my-namespace')
+ project = table(:projects).create!(name: 'my project', path: 'my-project', namespace_id: namespace.id,
+ project_namespace_id: namespace.id)
+ merge_request = table(:merge_requests).create!(target_project_id: project.id, target_branch: 'main',
+ source_branch: 'not-main')
+ table(:merge_request_diffs).create!(merge_request_id: merge_request.id, project_id: nil)
+ end
+ end
+
+ let!(:start_id) { table(:merge_request_diffs).minimum(:id) }
+ let!(:end_id) { table(:merge_request_diffs).maximum(:id) }
+
+ let!(:migration) do
+ described_class.new(
+ start_id: start_id,
+ end_id: end_id,
+ batch_table: :merge_request_diffs,
+ batch_column: :id,
+ sub_batch_size: 10,
+ pause_ms: 2,
+ connection: ::ApplicationRecord.connection
+ )
+ end
+
+ it 'backfills the missing project_id for the batch' do
+ backfilled_diffs = table(:merge_request_diffs)
+ .joins('INNER JOIN merge_requests ON merge_request_diffs.merge_request_id = merge_requests.id')
+ .where('merge_request_diffs.project_id = merge_requests.target_project_id')
+
+ expect do
+ migration.perform
+ end.to change { backfilled_diffs.count }.from(0).to(13)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
deleted file mode 100644
index 2949bc068c8..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-# this needs the schema to be before we introduce the not null constraint on routes#namespace_id
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForProjectRoute, schema: 20220606060825 do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:routes) { table(:routes) }
-
- let(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'space1') }
- let(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'space2') }
- let(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'space3') }
-
- let(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id) }
- let(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace2.id) }
- let(:proj_namespace3) { namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: namespace3.id) }
- let(:proj_namespace4) { namespaces.create!(name: 'proj4', path: 'proj4', type: 'Project', parent_id: namespace3.id) }
-
- # rubocop:disable Layout/LineLength
- let(:proj1) { projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id) }
- let(:proj2) { projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace2.id, project_namespace_id: proj_namespace2.id) }
- let(:proj3) { projects.create!(name: 'proj3', path: 'proj3', namespace_id: namespace3.id, project_namespace_id: proj_namespace3.id) }
- let(:proj4) { projects.create!(name: 'proj4', path: 'proj4', namespace_id: namespace3.id, project_namespace_id: proj_namespace4.id) }
- # rubocop:enable Layout/LineLength
-
- let!(:namespace_route1) { routes.create!(path: 'space1', source_id: namespace1.id, source_type: 'Namespace') }
- let!(:namespace_route2) { routes.create!(path: 'space1/space2', source_id: namespace2.id, source_type: 'Namespace') }
- let!(:namespace_route3) { routes.create!(path: 'space1/space3', source_id: namespace3.id, source_type: 'Namespace') }
-
- let!(:proj_route1) { routes.create!(path: 'space1/proj1', source_id: proj1.id, source_type: 'Project') }
- let!(:proj_route2) { routes.create!(path: 'space1/space2/proj2', source_id: proj2.id, source_type: 'Project') }
- let!(:proj_route3) { routes.create!(path: 'space1/space3/proj3', source_id: proj3.id, source_type: 'Project') }
- let!(:proj_route4) { routes.create!(path: 'space1/space3/proj4', source_id: proj4.id, source_type: 'Project') }
-
- subject(:perform_migration) { migration.perform(proj_route1.id, proj_route4.id, :routes, :id, 2, 0) }
-
- it 'backfills namespace_id for the selected records', :aggregate_failures do
- perform_migration
-
- expected_namespaces = [proj_namespace1.id, proj_namespace2.id, proj_namespace3.id, proj_namespace4.id]
-
- expected_projects = [proj_route1.id, proj_route2.id, proj_route3.id, proj_route4.id]
- expect(routes.where.not(namespace_id: nil).pluck(:id)).to match_array(expected_projects)
- expect(routes.where.not(namespace_id: nil).pluck(:namespace_id)).to match_array(expected_namespaces)
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
index 6a55c6951d5..c2c5c3e9de0 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdOfVulnerabilityReads, schema: 20220722145845 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdOfVulnerabilityReads, schema: 20221111123146 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
index ae296483166..73661a3da1f 100644
--- a/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
-require_migration!
# rubocop:disable Layout/HashAlignment
RSpec.describe Gitlab::BackgroundMigration::BackfillProjectImportLevel do
diff --git a/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
deleted file mode 100644
index 96f49624d22..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectMemberNamespaceId, :migration, schema: 20220516054011 do
- let(:migration) do
- described_class.new(
- start_id: 1, end_id: 10,
- batch_table: table_name, batch_column: batch_column,
- sub_batch_size: sub_batch_size, pause_ms: pause_ms,
- connection: ApplicationRecord.connection
- )
- end
-
- let(:members_table) { table(:members) }
- let(:projects_table) { table(:projects) }
- let(:namespaces_table) { table(:namespaces) }
-
- let(:table_name) { 'members' }
- let(:batch_column) { :id }
- let(:sub_batch_size) { 100 }
- let(:pause_ms) { 0 }
-
- subject(:perform_migration) do
- migration.perform
- end
-
- before do
- namespaces_table.create!(id: 201, name: 'group1', path: 'group1', type: 'Group')
- namespaces_table.create!(id: 202, name: 'group2', path: 'group2', type: 'Group')
- namespaces_table.create!(id: 300, name: 'project-namespace-1', path: 'project-namespace-1-path', type: 'Project')
- namespaces_table.create!(id: 301, name: 'project-namespace-2', path: 'project-namespace-2-path', type: 'Project')
- namespaces_table.create!(id: 302, name: 'project-namespace-3', path: 'project-namespace-3-path', type: 'Project')
-
- projects_table.create!(id: 100, name: 'project1', path: 'project1', namespace_id: 202, project_namespace_id: 300)
- projects_table.create!(id: 101, name: 'project2', path: 'project2', namespace_id: 202, project_namespace_id: 301)
- projects_table.create!(id: 102, name: 'project3', path: 'project3', namespace_id: 202, project_namespace_id: 302)
-
- # project1, no member namespace (fill in)
- members_table.create!(
- id: 1, source_id: 100,
- source_type: 'Project', type: 'ProjectMember',
- member_namespace_id: nil, access_level: 10, notification_level: 3
- )
-
- # bogus source id, no member namespace id (do nothing)
- members_table.create!(
- id: 2, source_id: non_existing_record_id,
- source_type: 'Project', type: 'ProjectMember',
- member_namespace_id: nil, access_level: 10, notification_level: 3
- )
-
- # project3, existing member namespace id (do nothing)
- members_table.create!(
- id: 3, source_id: 102,
- source_type: 'Project', type: 'ProjectMember',
- member_namespace_id: 300, access_level: 10, notification_level: 3
- )
-
- # Group memberships (do not change)
- # group1, no member namespace (do nothing)
- members_table.create!(
- id: 4, source_id: 201,
- source_type: 'Namespace', type: 'GroupMember',
- member_namespace_id: nil, access_level: 10, notification_level: 3
- )
-
- # group2, existing member namespace (do nothing)
- members_table.create!(
- id: 5, source_id: 202,
- source_type: 'Namespace', type: 'GroupMember',
- member_namespace_id: 201, access_level: 10, notification_level: 3
- )
-
- # Project Namespace memberships (do not change)
- # project namespace, existing member namespace (do nothing)
- members_table.create!(
- id: 6, source_id: 300,
- source_type: 'Namespace', type: 'ProjectNamespaceMember',
- member_namespace_id: 201, access_level: 10, notification_level: 3
- )
-
- # project namespace, not member namespace (do nothing)
- members_table.create!(
- id: 7, source_id: 301,
- source_type: 'Namespace', type: 'ProjectNamespaceMember',
- member_namespace_id: 201, access_level: 10, notification_level: 3
- )
- end
-
- it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 2
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- # rubocop:disable Layout/LineLength
- expect(queries.count).to eq(3)
- expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1 # just the bogus one
- expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 300, 300])
- expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
- expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([nil, 201])
- # rubocop:enable Layout/LineLength
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- context 'when given a negative pause_ms' do
- let(:pause_ms) { -9 }
- let(:sub_batch_size) { 2 }
-
- it 'uses 0 as a floor for pause_ms' do
- expect(migration).to receive(:sleep).with(0)
-
- perform_migration
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb b/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb
index 3c46456eed0..3eb0000877d 100644
--- a/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillTopicsTitle, schema: 20220331133802 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillTopicsTitle, schema: 20221111123146 do
it 'correctly backfills the title of the topics' do
topics = table(:topics)
diff --git a/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb b/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
index 04ada1703bc..1d1853b032c 100644
--- a/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillUserDetailsFields, :migration, schema: 20221018232820 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillUserDetailsFields, :migration, schema: 20221111123146 do
let(:users) { table(:users) }
let(:user_details) { table(:user_details) }
diff --git a/spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb
new file mode 100644
index 00000000000..bf1fce4094e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_uuid_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillVsCodeSettingsUuid, schema: 20231130140901, feature_category: :web_ide do
+ let!(:vs_code_settings) { table(:vs_code_settings) }
+ let!(:users) { table(:users) }
+
+ let!(:user) do
+ users.create!(
+ email: "test1@example.com",
+ username: "test1",
+ notification_email: "test@example.com",
+ name: "test",
+ state: "active",
+ projects_limit: 10)
+ end
+
+ subject(:migration) do
+ described_class.new(
+ start_id: vs_code_setting_one.id,
+ end_id: vs_code_setting_two.id,
+ batch_table: :vs_code_settings,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe "#perform" do
+ context 'when it finds vs_code_setting rows with empty uuid' do
+ let(:vs_code_setting_one) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'profiles', content: '{}')
+ end
+
+ let(:vs_code_setting_two) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'tasks', content: '{}')
+ end
+
+ it 'populates uuid column with a generated uuid' do
+ expect(vs_code_setting_one.uuid).to be_nil
+ expect(vs_code_setting_two.uuid).to be_nil
+
+ migration.perform
+
+ expect(vs_code_setting_one.reload.uuid).not_to be_nil
+ expect(vs_code_setting_two.reload.uuid).not_to be_nil
+ end
+ end
+
+ context 'when it finds vs_code_setting rows with non-empty uuid' do
+ let(:vs_code_setting_one) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'profiles', content: '{}', uuid: SecureRandom.uuid)
+ end
+
+ let(:vs_code_setting_two) do
+ vs_code_settings.create!(user_id: user.id, setting_type: 'tasks', content: '{}')
+ end
+
+ it 'populates uuid column with a generated uuid' do
+ expect(vs_code_setting_one.uuid).not_to be_nil
+ expect(vs_code_setting_two.uuid).to be_nil
+
+ previous_uuid = vs_code_setting_one.uuid
+
+ migration.perform
+
+ expect(vs_code_setting_one.reload.uuid).to eq(previous_uuid)
+ expect(vs_code_setting_two.reload.uuid).not_to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb b/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
index 3f1a57434a7..63135971cd3 100644
--- a/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillVulnerabilityReadsClusterAgent, :migration, schema: 20220525221133 do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::BackfillVulnerabilityReadsClusterAgent, :migration, schema: 20221111123146 do # rubocop:disable Layout/LineLength
let(:migration) do
described_class.new(
start_id: 1, end_id: 10,
diff --git a/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb
index c7e4095a488..4c1af279804 100644
--- a/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillWorkItemTypeIdForIssues,
:migration,
- schema: 20220825142324,
+ schema: 20221111123146,
feature_category: :team_planning do
let(:batch_column) { 'id' }
let(:sub_batch_size) { 2 }
diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
deleted file mode 100644
index a09d5559d33..00000000000
--- a/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# this needs the schema to be before we introduce the not null constraint on routes#namespace_id
-RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedRoutes, schema: 20220606060825 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:routes) { table(:routes) }
-
- let!(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'space1') }
- let!(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'space2') }
- let!(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'space3') }
-
- let!(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id) }
- let!(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace2.id) }
- let!(:proj_namespace3) { namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: namespace3.id) }
-
- # rubocop:disable Layout/LineLength
- let!(:proj1) { projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id) }
- let!(:proj2) { projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace2.id, project_namespace_id: proj_namespace2.id) }
- let!(:proj3) { projects.create!(name: 'proj3', path: 'proj3', namespace_id: namespace3.id, project_namespace_id: proj_namespace3.id) }
-
- # valid namespace routes with not null namespace_id
- let!(:namespace_route1) { routes.create!(path: 'space1', source_id: namespace1.id, source_type: 'Namespace', namespace_id: namespace1.id) }
- # valid namespace routes with null namespace_id
- let!(:namespace_route2) { routes.create!(path: 'space1/space2', source_id: namespace2.id, source_type: 'Namespace') }
- let!(:namespace_route3) { routes.create!(path: 'space1/space3', source_id: namespace3.id, source_type: 'Namespace') }
- # invalid/orphaned namespace route
- let!(:orphaned_namespace_route_a) { routes.create!(path: 'space1/space4', source_id: non_existing_record_id, source_type: 'Namespace') }
- let!(:orphaned_namespace_route_b) { routes.create!(path: 'space1/space5', source_id: non_existing_record_id - 1, source_type: 'Namespace') }
-
- # valid project routes with not null namespace_id
- let!(:proj_route1) { routes.create!(path: 'space1/proj1', source_id: proj1.id, source_type: 'Project', namespace_id: proj_namespace1.id) }
- # valid project routes with null namespace_id
- let!(:proj_route2) { routes.create!(path: 'space1/space2/proj2', source_id: proj2.id, source_type: 'Project') }
- let!(:proj_route3) { routes.create!(path: 'space1/space3/proj3', source_id: proj3.id, source_type: 'Project') }
- # invalid/orphaned namespace route
- let!(:orphaned_project_route_a) { routes.create!(path: 'space1/space3/proj5', source_id: non_existing_record_id, source_type: 'Project') }
- let!(:orphaned_project_route_b) { routes.create!(path: 'space1/space3/proj6', source_id: non_existing_record_id - 1, source_type: 'Project') }
- # rubocop:enable Layout/LineLength
-
- let!(:migration_attrs) do
- {
- start_id: Route.minimum(:id),
- end_id: Route.maximum(:id),
- batch_table: :routes,
- batch_column: :id,
- sub_batch_size: 100,
- pause_ms: 0,
- connection: ApplicationRecord.connection
- }
- end
-
- let!(:migration) { described_class.new(**migration_attrs) }
-
- subject(:perform_migration) { migration.perform }
-
- it 'cleans orphaned routes', :aggregate_failures do
- all_route_ids = Route.pluck(:id)
-
- orphaned_route_ids = [
- orphaned_namespace_route_a, orphaned_namespace_route_b, orphaned_project_route_a, orphaned_project_route_b
- ].pluck(:id)
- remaining_routes = (all_route_ids - orphaned_route_ids).sort
-
- expect { perform_migration }.to change { Route.pluck(:id) }.to contain_exactly(*remaining_routes)
- expect(Route.all).to all(have_attributes(namespace_id: be_present))
-
- # expect that routes that had namespace_id set did not change namespace_id
- expect(namespace_route1.reload.namespace_id).to eq(namespace1.id)
- expect(proj_route1.reload.namespace_id).to eq(proj_namespace1.id)
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb b/spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb
deleted file mode 100644
index 4e136808a36..00000000000
--- a/spec/lib/gitlab/background_migration/destroy_invalid_group_members_spec.rb
+++ /dev/null
@@ -1,105 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidGroupMembers, :migration, schema: 20220809002011 do
- # rubocop: disable Layout/LineLength
- # rubocop: disable RSpec/ScatteredLet
- let!(:migration_attrs) do
- {
- start_id: 1,
- end_id: 1000,
- batch_table: :members,
- batch_column: :id,
- sub_batch_size: 100,
- pause_ms: 0,
- connection: ApplicationRecord.connection
- }
- end
-
- let!(:migration) { described_class.new(**migration_attrs) }
-
- subject(:perform_migration) { migration.perform }
-
- let(:users_table) { table(:users) }
- let(:namespaces_table) { table(:namespaces) }
- let(:members_table) { table(:members) }
- let(:projects_table) { table(:projects) }
-
- let(:user1) { users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 5) }
- let(:user2) { users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 5) }
- let(:user3) { users_table.create!(name: 'user3', email: 'user3@example.com', projects_limit: 5) }
- let(:user4) { users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 5) }
- let(:user5) { users_table.create!(name: 'user5', email: 'user5@example.com', projects_limit: 5) }
- let(:user6) { users_table.create!(name: 'user6', email: 'user6@example.com', projects_limit: 5) }
-
- let!(:group1) { namespaces_table.create!(name: 'marvellous group 1', path: 'group-path-1', type: 'Group') }
-
- let!(:group2) { namespaces_table.create!(name: 'outstanding group 2', path: 'group-path-2', type: 'Group') }
-
- # create group member records, a mix of both valid and invalid
- # project members will have already been filtered out.
- let!(:group_member1) { create_invalid_group_member(id: 1, user_id: user1.id) }
-
- let!(:group_member4) { create_valid_group_member(id: 4, user_id: user2.id, group_id: group1.id) }
-
- let!(:group_member5) { create_valid_group_member(id: 5, user_id: user3.id, group_id: group2.id) }
-
- let!(:group_member6) { create_invalid_group_member(id: 6, user_id: user4.id) }
-
- let!(:group_member7) { create_valid_group_member(id: 7, user_id: user5.id, group_id: group1.id) }
-
- let!(:group_member8) { create_invalid_group_member(id: 8, user_id: user6.id) }
-
- it 'removes invalid memberships but keeps valid ones', :aggregate_failures do
- expect(members_table.where(type: 'GroupMember').count).to eq 6
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(4)
- expect(members_table.where(type: 'GroupMember').pluck(:id)).to match_array([group_member4, group_member5, group_member7].map(&:id))
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- it 'logs IDs of deleted records' do
- expect(Gitlab::AppLogger).to receive(:info).with({ message: 'Removing invalid group member records',
- deleted_count: 3, ids: [group_member1, group_member6, group_member8].map(&:id) })
-
- perform_migration
- end
-
- def create_invalid_group_member(id:, user_id:)
- members_table.create!(
- id: id,
- user_id: user_id,
- source_id: non_existing_record_id,
- access_level: Gitlab::Access::MAINTAINER,
- type: "GroupMember",
- source_type: "Namespace",
- notification_level: 3,
- member_namespace_id: nil
- )
- end
-
- def create_valid_group_member(id:, user_id:, group_id:)
- members_table.create!(
- id: id,
- user_id: user_id,
- source_id: group_id,
- access_level: Gitlab::Access::MAINTAINER,
- type: "GroupMember",
- source_type: "Namespace",
- member_namespace_id: group_id,
- notification_level: 3
- )
- end
- # rubocop: enable Layout/LineLength
- # rubocop: enable RSpec/ScatteredLet
-end
diff --git a/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb b/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
index e5965d4a1d8..95c5a64bc7b 100644
--- a/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
+++ b/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
# rubocop: disable RSpec/MultipleMemoizedHelpers
-RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidMembers, :migration, schema: 20221004094814 do
+RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidMembers, :migration, schema: 20221111123146 do
let!(:migration_attrs) do
{
start_id: 1,
diff --git a/spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb b/spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb
deleted file mode 100644
index 090c31049b4..00000000000
--- a/spec/lib/gitlab/background_migration/destroy_invalid_project_members_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidProjectMembers, :migration, schema: 20220901035725 do
- # rubocop: disable RSpec/ScatteredLet
- let!(:migration_attrs) do
- {
- start_id: 1,
- end_id: 1000,
- batch_table: :members,
- batch_column: :id,
- sub_batch_size: 100,
- pause_ms: 0,
- connection: ApplicationRecord.connection
- }
- end
-
- let!(:migration) { described_class.new(**migration_attrs) }
-
- subject(:perform_migration) { migration.perform }
-
- let(:users_table) { table(:users) }
- let(:namespaces_table) { table(:namespaces) }
- let(:members_table) { table(:members) }
- let(:projects_table) { table(:projects) }
-
- let(:user1) { users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 5) }
- let(:user2) { users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 5) }
- let(:user3) { users_table.create!(name: 'user3', email: 'user3@example.com', projects_limit: 5) }
- let(:user4) { users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 5) }
- let(:user5) { users_table.create!(name: 'user5', email: 'user5@example.com', projects_limit: 5) }
- let(:user6) { users_table.create!(name: 'user6', email: 'user6@example.com', projects_limit: 5) }
-
- let!(:group1) { namespaces_table.create!(name: 'marvellous group 1', path: 'group-path-1', type: 'Group') }
-
- let!(:project_namespace1) do
- namespaces_table.create!(
- name: 'fabulous project', path: 'project-path-1', type: 'ProjectNamespace', parent_id: group1.id
- )
- end
-
- let!(:project1) do
- projects_table.create!(
- name: 'fabulous project',
- path: 'project-path-1',
- project_namespace_id: project_namespace1.id,
- namespace_id: group1.id
- )
- end
-
- let!(:project_namespace2) do
- namespaces_table.create!(
- name: 'splendiferous project', path: 'project-path-2', type: 'ProjectNamespace', parent_id: group1.id
- )
- end
-
- let!(:project2) do
- projects_table.create!(
- name: 'splendiferous project',
- path: 'project-path-2',
- project_namespace_id: project_namespace2.id,
- namespace_id: group1.id
- )
- end
-
- # create project member records, a mix of both valid and invalid
- # group members will have already been filtered out.
- let!(:project_member1) { create_invalid_project_member(id: 1, user_id: user1.id) }
- let!(:project_member2) { create_valid_project_member(id: 4, user_id: user2.id, project: project1) }
- let!(:project_member3) { create_valid_project_member(id: 5, user_id: user3.id, project: project2) }
- let!(:project_member4) { create_invalid_project_member(id: 6, user_id: user4.id) }
- let!(:project_member5) { create_valid_project_member(id: 7, user_id: user5.id, project: project2) }
- let!(:project_member6) { create_invalid_project_member(id: 8, user_id: user6.id) }
-
- it 'removes invalid memberships but keeps valid ones', :aggregate_failures do
- expect(members_table.where(type: 'ProjectMember').count).to eq 6
-
- queries = ActiveRecord::QueryRecorder.new do
- perform_migration
- end
-
- expect(queries.count).to eq(4)
- expect(members_table.where(type: 'ProjectMember'))
- .to match_array([project_member2, project_member3, project_member5])
- end
-
- it 'tracks timings of queries' do
- expect(migration.batch_metrics.timings).to be_empty
-
- expect { perform_migration }.to change { migration.batch_metrics.timings }
- end
-
- it 'logs IDs of deleted records' do
- expect(Gitlab::AppLogger).to receive(:info).with({
- message: 'Removing invalid project member records',
- deleted_count: 3,
- ids: [project_member1, project_member4, project_member6].map(&:id)
- })
-
- perform_migration
- end
-
- def create_invalid_project_member(id:, user_id:)
- members_table.create!(
- id: id, user_id: user_id, source_id: non_existing_record_id, access_level: Gitlab::Access::MAINTAINER,
- type: "ProjectMember", source_type: "Project", notification_level: 3, member_namespace_id: nil
- )
- end
-
- def create_valid_project_member(id:, user_id:, project:)
- members_table.create!(
- id: id,
- user_id: user_id,
- source_id: project.id,
- access_level: Gitlab::Access::MAINTAINER,
- type: "ProjectMember",
- source_type: "Project",
- member_namespace_id: project.project_namespace_id,
- notification_level: 3
- )
- end
- # rubocop: enable RSpec/ScatteredLet
-end
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
index 93913a2742b..8afbd7403d3 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForNoIssuesNoRepoProjects,
:migration,
- schema: 20220722084543 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
index 285e5ebbee2..185e2da6f1d 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForOneMemberNoRepoProjects,
:migration,
- schema: 20220721031446 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
@@ -13,15 +13,14 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForOne
let(:project_authorizations_table) { table(:project_authorizations) }
subject(:perform_migration) do
- described_class.new(
- start_id: projects_table.minimum(:id),
+ described_class.new(start_id: projects_table.minimum(:id),
end_id: projects_table.maximum(:id),
batch_table: :projects,
batch_column: :id,
sub_batch_size: 2,
pause_ms: 0,
- connection: ActiveRecord::Base.connection
- ).perform
+ connection: ActiveRecord::Base.connection)
+ .perform
end
it 'sets `legacy_open_source_license_available` to false only for public projects with 1 member and no repo',
@@ -43,13 +42,13 @@ RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForOne
def create_legacy_license_public_project(path, repo_size: 0, members: 1)
namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
- project_namespace = namespaces_table.create!(
- name: "-project-namespace-#{path}", path: "project-namespace-#{path}", type: 'Project'
- )
- project = projects_table.create!(
- name: path, path: path, namespace_id: namespace.id,
- project_namespace_id: project_namespace.id, visibility_level: 20
- )
+ project_namespace =
+ namespaces_table.create!(name: "-project-namespace-#{path}", path: "project-namespace-#{path}", type: 'Project')
+ project = projects_table
+ .create!(
+ name: path, path: path, namespace_id: namespace.id,
+ project_namespace_id: project_namespace.id, visibility_level: 20
+ )
members.times do |member_id|
user = users_table.create!(email: "user#{member_id}-project-#{project.id}@gitlab.com", projects_limit: 100)
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
index cf544c87b31..f1ec09840ab 100644
--- a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_projects_less_than_one_mb_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForProjectsLessThanOneMb,
:migration,
- schema: 20220906074449 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
diff --git a/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb b/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb
index ba3aab03f2a..4997ae3763e 100644
--- a/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb
+++ b/spec/lib/gitlab/background_migration/expire_o_auth_tokens_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::ExpireOAuthTokens, :migration, schema: 20220428133724 do
+RSpec.describe Gitlab::BackgroundMigration::ExpireOAuthTokens, :migration, schema: 20221111123146 do
let(:migration) { described_class.new }
let(:oauth_access_tokens_table) { table(:oauth_access_tokens) }
diff --git a/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb b/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
index 5f5dcb35836..cd99557e1d9 100644
--- a/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_allow_descendants_override_disabled_shared_runners_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::FixAllowDescendantsOverrideDisabledSharedRunners, schema: 20230802085923, feature_category: :runner_fleet do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::FixAllowDescendantsOverrideDisabledSharedRunners, schema: 20230802085923,
+ feature_category: :fleet_visibility do
let(:namespaces) { table(:namespaces) }
let!(:valid_enabled) do
diff --git a/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb b/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb
index 7edeaed5794..83d19ef7ce3 100644
--- a/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_human_user_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MigrateHumanUserType, schema: 20230327103401, feature_category: :user_management do # rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::MigrateHumanUserType, feature_category: :user_management do
let!(:valid_users) do
# 13 is the max value we have at the moment.
(0..13).map do |type|
diff --git a/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb b/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
index 74f674e052d..35f93c9982b 100644
--- a/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::PopulateProjectsStarCount, schema: 20221019105041 do
+RSpec.describe Gitlab::BackgroundMigration::PopulateProjectsStarCount, schema: 20221111123146 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb b/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb
index 59d5d56ebe8..74fe2c63139 100644
--- a/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_self_managed_wiki_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::RemoveSelfManagedWikiNotes, :migration, schema: 20220601110011 do
+RSpec.describe Gitlab::BackgroundMigration::RemoveSelfManagedWikiNotes, :migration, schema: 20221111123146 do
let(:notes) { table(:notes) }
subject(:perform_migration) do
diff --git a/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb b/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb
index afdd855c5a8..86c2ab35136 100644
--- a/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb
+++ b/spec/lib/gitlab/background_migration/reset_too_many_tags_skipped_registry_imports_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ResetTooManyTagsSkippedRegistryImports, :migration,
:aggregate_failures,
- schema: 20220502173045 do
+ schema: 20221111123146 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:container_repositories) { table(:container_repositories) }
diff --git a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
index 5109c3ec0c2..5f7a699ac0b 100644
--- a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::SetLegacyOpenSourceLicenseAvailableForNonPublicProjects,
:migration,
- schema: 20220722110026 do
+ schema: 20221111123146 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
deleted file mode 100644
index d468483661a..00000000000
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ /dev/null
@@ -1,559 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, feature_category: :importers do
- include ImportSpecHelper
-
- before do
- stub_omniauth_provider('bitbucket')
- end
-
- let(:statuses) do
- [
- "open",
- "resolved",
- "on hold",
- "invalid",
- "duplicate",
- "wontfix",
- "closed" # undocumented status
- ]
- end
-
- let(:reporters) do
- [
- nil,
- { "nickname" => "reporter1" },
- nil,
- { "nickname" => "reporter2" },
- { "nickname" => "reporter1" },
- nil,
- { "nickname" => "reporter3" }
- ]
- end
-
- let(:sample_issues_statuses) do
- issues = []
-
- statuses.map.with_index do |status, index|
- issues << {
- id: index,
- state: status,
- title: "Issue #{index}",
- kind: 'bug',
- content: {
- raw: "Some content to issue #{index}",
- markup: "markdown",
- html: "Some content to issue #{index}"
- }
- }
- end
-
- reporters.map.with_index do |reporter, index|
- issues[index]['reporter'] = reporter
- end
-
- issues
- end
-
- let_it_be(:project_identifier) { 'namespace/repo' }
-
- let_it_be_with_reload(:project) do
- create(
- :project,
- :repository,
- import_source: project_identifier,
- import_url: "https://bitbucket.org/#{project_identifier}.git",
- import_data_attributes: { credentials: { 'token' => 'token' } }
- )
- end
-
- let(:importer) { described_class.new(project) }
- let(:sample) { RepoHelpers.sample_compare }
- let(:issues_statuses_sample_data) do
- {
- count: sample_issues_statuses.count,
- values: sample_issues_statuses
- }
- end
-
- let(:last_issue_data) do
- {
- page: 1,
- pagelen: 1,
- values: [sample_issues_statuses.last]
- }
- end
-
- let(:counter) { double('counter', increment: true) }
-
- subject { described_class.new(project) }
-
- describe '#import_pull_requests' do
- let(:source_branch_sha) { sample.commits.last }
- let(:merge_commit_sha) { sample.commits.second }
- let(:target_branch_sha) { sample.commits.first }
- let(:pull_request) do
- instance_double(
- Bitbucket::Representation::PullRequest,
- iid: 10,
- source_branch_sha: source_branch_sha,
- source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
- target_branch_sha: target_branch_sha,
- target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
- merge_commit_sha: merge_commit_sha,
- title: 'This is a title',
- description: 'This is a test pull request',
- state: 'merged',
- author: pull_request_author,
- created_at: Time.now,
- updated_at: Time.now)
- end
-
- let(:pull_request_author) { 'other' }
- let(:comments) { [@inline_note, @reply] }
-
- let(:author_line) { "*Created by: someuser*\n\n" }
-
- before do
- allow(subject).to receive(:import_wiki)
- allow(subject).to receive(:import_issues)
-
- # https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
- @inline_note = instance_double(
- Bitbucket::Representation::PullRequestComment,
- iid: 2,
- file_path: '.gitmodules',
- old_pos: nil,
- new_pos: 4,
- note: 'Hello world',
- author: 'someuser',
- created_at: Time.now,
- updated_at: Time.now,
- inline?: true,
- has_parent?: false)
-
- @reply = instance_double(
- Bitbucket::Representation::PullRequestComment,
- iid: 3,
- file_path: '.gitmodules',
- note: 'Hello world',
- author: 'someuser',
- created_at: Time.now,
- updated_at: Time.now,
- inline?: true,
- has_parent?: true,
- parent_id: 2)
-
- allow(subject.client).to receive(:repo)
- allow(subject.client).to receive(:pull_requests).and_return([pull_request])
- allow(subject.client).to receive(:pull_request_comments).with(anything, pull_request.iid).and_return(comments)
- end
-
- it 'imports threaded discussions' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.state).to eq('merged')
- expect(merge_request.notes.count).to eq(2)
- expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
-
- notes = merge_request.notes.order(:id).to_a
- start_note = notes.first
- expect(start_note).to be_a(DiffNote)
- expect(start_note.note).to include(@inline_note.note)
- expect(start_note.note).to include(author_line)
-
- reply_note = notes.last
- expect(reply_note).to be_a(DiffNote)
- expect(reply_note.note).to include(@reply.note)
- expect(reply_note.note).to include(author_line)
- end
-
- context 'when author is blank' do
- let(:pull_request_author) { nil }
-
- it 'adds created by anonymous in the description', :aggregate_failures do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- expect(MergeRequest.first.description).to include('Created by: Anonymous')
- end
- end
-
- context 'when user exists in GitLab' do
- let!(:existing_user) { create(:user, username: 'someuser') }
- let!(:identity) { create(:identity, provider: 'bitbucket', extern_uid: existing_user.username, user: existing_user) }
-
- it 'does not add author line to comments' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
-
- notes = merge_request.notes.order(:id).to_a
- start_note = notes.first
- expect(start_note.note).to eq(@inline_note.note)
- expect(start_note.note).not_to include(author_line)
-
- reply_note = notes.last
- expect(reply_note.note).to eq(@reply.note)
- expect(reply_note.note).not_to include(author_line)
- end
- end
-
- it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
- expect(subject.instance_values['ref_converter']).to receive(:convert_note).twice
-
- subject.execute
- end
-
- context 'when importing a pull request throws an exception' do
- before do
- allow(pull_request).to receive(:raw).and_return({ error: "broken" })
- allow(subject.client).to receive(:pull_request_comments).and_raise(Gitlab::HTTP::Error)
- end
-
- it 'logs an error without the backtrace' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception)
- .with(instance_of(Gitlab::HTTP::Error), hash_including(raw_response: '{"error":"broken"}'))
-
- subject.execute
-
- expect(subject.errors.count).to eq(1)
- expect(subject.errors.first.keys).to match_array(%i[type iid errors])
- end
- end
-
- context 'when source SHA is not found in the repository' do
- let(:source_branch_sha) { 'a' * Commit::MIN_SHA_LENGTH }
- let(:target_branch_sha) { 'c' * Commit::MIN_SHA_LENGTH }
-
- it 'uses merge commit SHA for source' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request_diff = MergeRequest.first.merge_request_diff
- expect(merge_request_diff.head_commit_sha).to eq merge_commit_sha
- expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
- end
-
- context 'when the merge commit SHA is also not found' do
- let(:merge_commit_sha) { 'b' * Commit::MIN_SHA_LENGTH }
-
- it 'uses the pull request sha references' do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request_diff = MergeRequest.first.merge_request_diff
- expect(merge_request_diff.head_commit_sha).to eq source_branch_sha
- expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
- end
- end
- end
-
- context "when target_branch_sha is blank" do
- let(:target_branch_sha) { nil }
-
- it 'creates the merge request with no target branch', :aggregate_failures do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- merge_request = MergeRequest.first
- expect(merge_request.target_branch_sha).to eq(nil)
- end
- end
-
- context 'metrics' do
- before do
- allow(Gitlab::Metrics).to receive(:counter) { counter }
- allow(pull_request).to receive(:raw).and_return('hello world')
- end
-
- it 'counts imported pull requests' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_importer_imported_merge_requests_total,
- 'The number of imported merge (pull) requests'
- )
-
- expect(counter).to receive(:increment)
-
- subject.execute
- end
- end
-
- context 'when pull request was already imported' do
- let(:pull_request_already_imported) do
- instance_double(
- BitbucketServer::Representation::PullRequest,
- iid: 11)
- end
-
- let(:cache_key) do
- format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :pull_requests)
- end
-
- before do
- allow(subject.client).to receive(:pull_requests).and_return([pull_request, pull_request_already_imported])
- Gitlab::Cache::Import::Caching.set_add(cache_key, pull_request_already_imported.iid)
- end
-
- it 'does not import the previously imported pull requests', :aggregate_failures do
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
-
- expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, pull_request.iid)).to eq(true)
- end
- end
- end
-
- context 'issues statuses' do
- before do
- # HACK: Bitbucket::Representation.const_get('Issue') seems to return ::Issue without this
- Bitbucket::Representation::Issue.new({})
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: { has_issues: true, full_name: project_identifier }.to_json)
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=1&sort=-created_on&state=ALL"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: last_issue_data.to_json)
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=50&sort=created_on"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: issues_statuses_sample_data.to_json)
-
- stub_request(:get, "https://api.bitbucket.org/2.0/repositories/namespace/repo?pagelen=50&sort=created_on")
- .with(headers: { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Authorization' => 'Bearer', 'User-Agent' => 'Faraday v0.9.2' })
- .to_return(status: 200, body: "", headers: {})
-
- sample_issues_statuses.each_with_index do |issue, index|
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues/#{issue[:id]}/comments?pagelen=50&sort=created_on"
- ).to_return(
- status: 200,
- headers: { "Content-Type" => "application/json" },
- body: { author_info: { username: "username" }, utc_created_on: index }.to_json
- )
- end
-
- stub_request(
- :get,
- "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/pullrequests?pagelen=50&sort=created_on&state=ALL"
- ).to_return(status: 200,
- headers: { "Content-Type" => "application/json" },
- body: {}.to_json)
- end
-
- context 'creating labels on project' do
- before do
- allow(importer).to receive(:import_wiki)
- end
-
- it 'creates labels as expected' do
- expect { importer.execute }.to change { Label.count }.from(0).to(Gitlab::BitbucketImport::Importer::LABELS.size)
- end
-
- it 'does not fail if label is already existing' do
- label = Gitlab::BitbucketImport::Importer::LABELS.first
- ::Labels::CreateService.new(label).execute(project: project)
-
- expect { importer.execute }.not_to raise_error
- end
-
- it 'does not create new labels' do
- Gitlab::BitbucketImport::Importer::LABELS.each do |label|
- create(:label, project: project, title: label[:title])
- end
-
- expect { importer.execute }.not_to change { Label.count }
- end
-
- it 'does not update existing ones' do
- label_title = Gitlab::BitbucketImport::Importer::LABELS.first[:title]
- existing_label = create(:label, project: project, title: label_title)
- # Reload label from database so we avoid timestamp comparison issues related to time precision when comparing
- # attributes later.
- existing_label.reload
-
- travel_to(Time.now + 1.minute) do
- importer.execute
-
- label_after_import = project.labels.find(existing_label.id)
- expect(label_after_import.attributes).to eq(existing_label.attributes)
- end
- end
-
- it 'raises an error if a label is not valid' do
- stub_const("#{described_class}::LABELS", [{ title: nil, color: nil }])
-
- expect { importer.create_labels }.to raise_error(StandardError, /Failed to create label/)
- end
- end
-
- it 'maps statuses to open or closed' do
- allow(importer).to receive(:import_wiki)
-
- importer.execute
-
- expect(project.issues.where(state_id: Issue.available_states[:closed]).size).to eq(5)
- expect(project.issues.where(state_id: Issue.available_states[:opened]).size).to eq(2)
- expect(project.issues.map(&:namespace_id).uniq).to match_array([project.project_namespace_id])
- end
-
- describe 'wiki import' do
- it 'is skipped when the wiki exists' do
- expect(project.wiki).to receive(:repository_exists?) { true }
- expect(project.wiki.repository).not_to receive(:import_repository)
-
- importer.execute
-
- expect(importer.errors).to be_empty
- end
-
- it 'imports to the project disk_path' do
- expect(project.wiki).to receive(:repository_exists?) { false }
- expect(project.wiki.repository).to receive(:import_repository)
-
- importer.execute
-
- expect(importer.errors).to be_empty
- end
- end
-
- describe 'issue import' do
- it 'allocates internal ids' do
- expect(Issue).to receive(:track_namespace_iid!).with(project.project_namespace, 6)
-
- importer.execute
- end
-
- it 'maps reporters to anonymous if bitbucket reporter is nil' do
- allow(importer).to receive(:import_wiki)
- importer.execute
-
- expect(project.issues.size).to eq(7)
- expect(project.issues.where("description LIKE ?", '%Anonymous%').size).to eq(3)
- expect(project.issues.where("description LIKE ?", '%reporter1%').size).to eq(2)
- expect(project.issues.where("description LIKE ?", '%reporter2%').size).to eq(1)
- expect(project.issues.where("description LIKE ?", '%reporter3%').size).to eq(1)
- expect(importer.errors).to be_empty
- end
-
- it 'sets work item type on new issues' do
- allow(importer).to receive(:import_wiki)
-
- importer.execute
-
- expect(project.issues.map(&:work_item_type_id).uniq).to contain_exactly(WorkItems::Type.default_issue_type.id)
- end
-
- context 'with issue comments' do
- let(:note) { 'Hello world' }
- let(:inline_note) do
- instance_double(Bitbucket::Representation::Comment, note: note, author: 'someuser', created_at: Time.now, updated_at: Time.now)
- end
-
- before do
- allow_next_instance_of(Bitbucket::Client) do |instance|
- allow(instance).to receive(:issue_comments).and_return([inline_note])
- end
- allow(importer).to receive(:import_wiki)
- end
-
- it 'imports issue comments' do
- importer.execute
-
- comment = project.notes.first
- expect(project.notes.size).to eq(7)
- expect(comment.note).to include(note)
- expect(comment.note).to include(inline_note.author)
- expect(importer.errors).to be_empty
- end
-
- it 'calls RefConverter to convert Bitbucket refs to Gitlab refs' do
- expect(importer.instance_values['ref_converter']).to receive(:convert_note).exactly(7).times
-
- importer.execute
- end
- end
-
- context 'when issue was already imported' do
- let(:cache_key) do
- format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :issues)
- end
-
- before do
- Gitlab::Cache::Import::Caching.set_add(cache_key, sample_issues_statuses.first[:id])
- end
-
- it 'does not import previously imported issues', :aggregate_failures do
- expect { subject.execute }.to change { Issue.count }.by(sample_issues_statuses.size - 1)
-
- sample_issues_statuses.each do |sample_issues_status|
- expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, sample_issues_status[:id])).to eq(true)
- end
- end
- end
- end
-
- context 'metrics' do
- before do
- allow(Gitlab::Metrics).to receive(:counter) { counter }
- end
-
- it 'counts imported issues' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_importer_imported_issues_total,
- 'The number of imported issues'
- )
-
- expect(counter).to receive(:increment)
-
- subject.execute
- end
- end
- end
-
- describe '#execute' do
- context 'metrics' do
- let(:histogram) { double(:histogram) }
-
- before do
- allow(subject).to receive(:import_wiki)
- allow(subject).to receive(:import_issues)
- allow(subject).to receive(:import_pull_requests)
-
- allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
- allow(Gitlab::Metrics).to receive(:histogram).and_return(histogram)
- allow(histogram).to receive(:observe)
- allow(counter).to receive(:increment)
- end
-
- it 'counts and measures duration of imported projects' do
- expect(Gitlab::Metrics).to receive(:counter).with(
- :bitbucket_importer_imported_projects_total,
- 'The number of imported projects'
- )
-
- expect(Gitlab::Metrics).to receive(:histogram).with(
- :bitbucket_importer_total_duration_seconds,
- 'Total time spent importing projects, in seconds',
- {},
- Gitlab::Import::Metrics::IMPORT_DURATION_BUCKETS
- )
-
- expect(counter).to receive(:increment)
- expect(histogram).to receive(:observe).with({ importer: :bitbucket_importer }, anything)
-
- subject.execute
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
index af5a929683e..90987f6d3d4 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/issues_importer_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::IssuesImporter, feature_categ
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
index a04543b0511..84dea203478 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/issues_notes_importer_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::IssuesNotesImporter, feature_
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
index 1f36a353724..e5a8a1c906f 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
@@ -77,6 +77,18 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestImporter, :clean_g
end
end
+ context 'when the source and target projects are different' do
+ let(:importer) { described_class.new(project, hash.merge(source_and_target_project_different: true)) }
+
+ it 'skips the import' do
+ expect(Gitlab::BitbucketImport::Logger)
+ .to receive(:info)
+ .with(include(message: 'skipping because source and target projects are different', iid: anything))
+
+ expect { importer.execute }.not_to change { project.merge_requests.count }
+ end
+ end
+
context 'when the author does not have a bitbucket identity' do
before do
identity.update!(provider: :github)
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
index eba7ec92aba..4d72c47d61a 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, feature
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
index 78a08accf82..b4c26ff7add 100644
--- a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_notes_importer_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsNotesImporter, fe
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
- importer.execute
+ expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb b/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
index 578b661d86b..c458214e794 100644
--- a/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/ref_converter_spec.rb
@@ -19,7 +19,14 @@ RSpec.describe Gitlab::BitbucketImport::RefConverter, feature_category: :importe
context 'when the note has an issue ref' do
let(:note) { "[https://bitbucket.org/namespace/repo/issues/1/first-issue](https://bitbucket.org/namespace/repo/issues/1/first-issue){: data-inline-card='' } " }
- let(:expected) { "[http://localhost/#{path}/-/issues/1/](http://localhost/#{path}/-/issues/1/)" }
+ let(:expected) { "[http://localhost/#{path}/-/issues/1](http://localhost/#{path}/-/issues/1)" }
+
+ it_behaves_like 'converts the ref correctly'
+ end
+
+ context 'when the note references issues without an issue name' do
+ let(:note) { "[https://bitbucket.org/namespace/repo/issues](https://bitbucket.org/namespace/repo/issues){: data-inline-card='' } " }
+ let(:expected) { "[http://localhost/#{path}/-/issues](http://localhost/#{path}/-/issues)" }
it_behaves_like 'converts the ref correctly'
end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
index c7e91c340b0..914ebefdb8f 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporter, feature_category: :importers do
include AfterNextHelpers
- let_it_be(:project) do
+ let_it_be_with_reload(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
@@ -18,21 +18,36 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let_it_be(:pull_request) { BitbucketServer::Representation::PullRequest.new(pull_request_data) }
let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
- let_it_be(:pull_request_author) do
+ let!(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
let(:merge_event) do
instance_double(
BitbucketServer::Representation::Activity,
+ id: 3,
comment?: false,
merge_event?: true,
+ approved_event?: false,
committer_email: pull_request_author.email,
merge_timestamp: now,
merge_commit: '12345678'
)
end
+ let(:approved_event) do
+ instance_double(
+ BitbucketServer::Representation::Activity,
+ id: 4,
+ comment?: false,
+ merge_event?: false,
+ approved_event?: true,
+ approver_username: pull_request_author.username,
+ approver_email: pull_request_author.email,
+ created_at: now
+ )
+ end
+
let(:pr_note) do
instance_double(
BitbucketServer::Representation::Comment,
@@ -48,6 +63,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let(:pr_comment) do
instance_double(
BitbucketServer::Representation::Activity,
+ id: 5,
comment?: true,
inline_comment?: false,
merge_event?: false,
@@ -63,9 +79,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
.to receive(:info).with(include(import_stage: stage, message: message))
end
- subject(:importer) { described_class.new(project, pull_request.to_hash) }
+ subject(:importer) { described_class.new(project.reload, pull_request.to_hash) }
- describe '#execute', :clean_gitlab_redis_cache do
+ describe '#execute' do
context 'when a matching merge request is not found' do
it 'does nothing' do
expect { importer.execute }.not_to change { Note.count }
@@ -79,7 +95,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
end
- context 'when a matching merge request is found' do
+ context 'when a matching merge request is found', :clean_gitlab_redis_cache do
let_it_be(:merge_request) { create(:merge_request, iid: pull_request.iid, source_project: project) }
it 'logs its progress' do
@@ -211,6 +227,112 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
expect(merge_request.merge_commit_sha).to eq(merge_event.merge_commit)
end
end
+
+ context 'when PR has an approved event' do
+ before do
+ allow_next(BitbucketServer::Client).to receive(:activities).and_return([approved_event])
+ end
+
+ it 'creates the approval, reviewer and approval note' do
+ expect { importer.execute }
+ .to change { merge_request.approvals.count }.from(0).to(1)
+ .and change { merge_request.notes.count }.from(0).to(1)
+ .and change { merge_request.reviewers.count }.from(0).to(1)
+
+ approval = merge_request.approvals.first
+
+ expect(approval.user).to eq(pull_request_author)
+ expect(approval.created_at).to eq(now)
+
+ note = merge_request.notes.first
+
+ expect(note.note).to eq('approved this merge request')
+ expect(note.author).to eq(pull_request_author)
+ expect(note.system).to be_truthy
+ expect(note.created_at).to eq(now)
+
+ reviewer = merge_request.reviewers.first
+
+ expect(reviewer.id).to eq(pull_request_author.id)
+ end
+
+ context 'when a user with a matching username does not exist' do
+ before do
+ pull_request_author.update!(username: 'another_username')
+ end
+
+ it 'finds the user based on email' do
+ importer.execute
+
+ approval = merge_request.approvals.first
+
+ expect(approval.user).to eq(pull_request_author)
+ end
+
+ context 'when no users match email or username' do
+ let_it_be(:another_author) { create(:user) }
+
+ before do
+ pull_request_author.destroy!
+ end
+
+ it 'does not set an approver' do
+ expect { importer.execute }
+ .to not_change { merge_request.approvals.count }
+ .and not_change { merge_request.notes.count }
+ .and not_change { merge_request.reviewers.count }
+
+ expect(merge_request.approvals).to be_empty
+ end
+ end
+ end
+
+ context 'if the reviewer already existed' do
+ before do
+ merge_request.reviewers = [pull_request_author]
+ merge_request.save!
+ end
+
+ it 'does not create the reviewer record' do
+ expect { importer.execute }.not_to change { merge_request.reviewers.count }
+ end
+ end
+ end
+ end
+
+ shared_examples 'import is skipped' do
+ it 'does not log and does not import notes' do
+ expect(Gitlab::BitbucketServerImport::Logger)
+ .not_to receive(:info).with(include(import_stage: 'import_pull_request_notes', message: 'starting'))
+
+ expect { importer.execute }.not_to change { Note.count }
+ end
+ end
+
+ context 'when the project has been marked as failed' do
+ before do
+ project.import_state.mark_as_failed('error')
+ end
+
+ include_examples 'import is skipped'
+ end
+
+ context 'when the import data does not have credentials' do
+ before do
+ project.import_data.credentials = nil
+ project.import_data.save!
+ end
+
+ include_examples 'import is skipped'
+ end
+
+ context 'when the import data does not have data' do
+ before do
+ project.import_data.data = nil
+ project.import_data.save!
+ end
+
+ include_examples 'import is skipped'
end
end
end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
index af8a0202083..df6dfa3219c 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_requests_importer_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, feature_category: :importers do
+ include RepoHelpers
+
let_it_be(:project) do
create(:project, :with_import_url, :import_started, :empty_repo,
import_data_attributes: {
@@ -15,15 +17,17 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, f
subject(:importer) { described_class.new(project) }
describe '#execute', :clean_gitlab_redis_cache do
+ let(:commit_sha) { 'aaaa1' }
+
before do
allow_next_instance_of(BitbucketServer::Client) do |client|
- allow(client).to receive(:pull_requests).and_return(
+ allow(client).to receive(:pull_requests).with('key', 'slug', a_hash_including(limit: 50)).and_return(
[
BitbucketServer::Representation::PullRequest.new(
{
'id' => 1,
'state' => 'MERGED',
- 'fromRef' => { 'latestCommit' => 'aaaa1' },
+ 'fromRef' => { 'latestCommit' => commit_sha },
'toRef' => { 'latestCommit' => 'aaaa2' }
}
),
@@ -77,15 +81,42 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestsImporter, f
context 'when pull requests are in merged or declined status' do
it 'fetches latest commits from the remote repository' do
+ expected_refmap = [
+ "#{commit_sha}:refs/merge-requests/1/head",
+ 'aaaa2:refs/keep-around/aaaa2',
+ 'bbbb1:refs/merge-requests/2/head',
+ 'bbbb2:refs/keep-around/bbbb2'
+ ]
+
expect(project.repository).to receive(:fetch_remote).with(
project.import_url,
- refmap: %w[aaaa1 aaaa2 bbbb1 bbbb2],
+ refmap: expected_refmap,
prune: false
)
importer.execute
end
+ context 'when a commit already exists' do
+ let_it_be(:commit_sha) { create_file_in_repo(project, 'master', 'master', 'test.txt', 'testing')[:result] }
+
+ it 'does not fetch the commit' do
+ expected_refmap = [
+ 'aaaa2:refs/keep-around/aaaa2',
+ 'bbbb1:refs/merge-requests/2/head',
+ 'bbbb2:refs/keep-around/bbbb2'
+ ]
+
+ expect(project.repository).to receive(:fetch_remote).with(
+ project.import_url,
+ refmap: expected_refmap,
+ prune: false
+ )
+
+ importer.execute
+ end
+ end
+
context 'when feature flag "fetch_commits_for_bitbucket_server" is disabled' do
before do
stub_feature_flags(fetch_commits_for_bitbucket_server: false)
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
new file mode 100644
index 00000000000..33d6ab94513
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::Importers::UsersImporter, feature_category: :importers do
+ let(:logger) { Gitlab::BitbucketServerImport::Logger }
+
+ let_it_be(:project) do
+ create(:project, :with_import_url, :import_started, :empty_repo,
+ import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
+
+ let(:user_1) do
+ BitbucketServer::Representation::User.new(
+ { 'user' => { 'emailAddress' => 'email1', 'slug' => 'username1' } }
+ )
+ end
+
+ let(:user_2) do
+ BitbucketServer::Representation::User.new(
+ { 'user' => { 'emailAddress' => 'email2', 'slug' => 'username2' } }
+ )
+ end
+
+ let(:user_3) do
+ BitbucketServer::Representation::User.new(
+ { 'user' => { 'emailAddress' => 'email3', 'slug' => 'username3' } }
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ allow_next_instance_of(BitbucketServer::Client) do |client|
+ allow(client).to receive(:users).with('key', limit: 2, page_offset: 1).and_return([user_1, user_2])
+ allow(client).to receive(:users).with('key', limit: 2, page_offset: 2).and_return([user_3])
+ allow(client).to receive(:users).with('key', limit: 2, page_offset: 3).and_return([])
+ end
+ end
+
+ subject(:importer) { described_class.new(project) }
+
+ describe '#execute' do
+ it 'writes the username and email to cache for every user in batches' do
+ expect(logger).to receive(:info).with(hash_including(message: 'starting'))
+ expect(logger).to receive(:info).with(hash_including(message: 'importing page 1 using batch size 2'))
+ expect(logger).to receive(:info).with(hash_including(message: 'importing page 2 using batch size 2'))
+ expect(logger).to receive(:info).with(hash_including(message: 'importing page 3 using batch size 2'))
+ expect(logger).to receive(:info).with(hash_including(message: 'finished'))
+
+ expect(Gitlab::Cache::Import::Caching).to receive(:write_multiple).and_call_original.twice
+
+ importer.execute
+
+ cache_key_prefix = "bitbucket_server/project/#{project.id}/source/username"
+ expect(Gitlab::Cache::Import::Caching.read("#{cache_key_prefix}/username1")).to eq('email1')
+ expect(Gitlab::Cache::Import::Caching.read("#{cache_key_prefix}/username2")).to eq('email2')
+ expect(Gitlab::Cache::Import::Caching.read("#{cache_key_prefix}/username3")).to eq('email3')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index 6f9879da281..8f1c552e0b7 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state, feature_category: :importers do
shared_examples 'validated redis value' do
let(:value) { double('value', to_s: Object.new) }
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
expect(redis).to receive(:get).with(/foo/).and_return('bar')
expect(redis).to receive(:expire).with(/foo/, described_class::TIMEOUT)
- expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
+ expect(Gitlab::Redis::Cache).to receive(:with).exactly(4).times.and_yield(redis)
described_class.read('foo')
end
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
expect(redis).to receive(:get).with(/foo/).and_return('')
expect(redis).not_to receive(:expire)
- expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
+ expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
described_class.read('foo')
end
@@ -80,6 +80,10 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
end
describe '.increment' do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'increment a key and returns the current value' do
expect(described_class.increment('foo')).to eq(1)
diff --git a/spec/lib/gitlab/checks/container_moved_spec.rb b/spec/lib/gitlab/checks/container_moved_spec.rb
index 00ef5604e1d..de7ec330e0c 100644
--- a/spec/lib/gitlab/checks/container_moved_spec.rb
+++ b/spec/lib/gitlab/checks/container_moved_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ContainerMoved, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::Checks::ContainerMoved, :clean_gitlab_redis_shared_state, feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index 8cdee727d3d..a84d9194cd2 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ForcePush do
+RSpec.describe Gitlab::Checks::ForcePush, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
describe '.force_push?' do
diff --git a/spec/lib/gitlab/checks/global_file_size_check_spec.rb b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
index db615053356..9e3be2fd2ec 100644
--- a/spec/lib/gitlab/checks/global_file_size_check_spec.rb
+++ b/spec/lib/gitlab/checks/global_file_size_check_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
context 'when there are oversized blobs' do
let(:mock_blob_id) { "88acbfafb1b8fdb7c51db870babce21bd861ac4f" }
let(:mock_blob_size) { 300 * 1024 * 1024 } # 300 MiB
- let(:size_msg) { "300.0" } # it is (mock_blob_size / 1024.0 / 1024.0).round(2).to_s
+ let(:size_msg) { "300" }
let(:blob_double) { instance_double(Gitlab::Git::Blob, size: mock_blob_size, id: mock_blob_id) }
before do
@@ -53,8 +53,7 @@ RSpec.describe Gitlab::Checks::GlobalFileSizeCheck, feature_category: :source_co
expect(Gitlab::AppJsonLogger).to receive(:info).with('Checking for blobs over the file size limit')
expect(Gitlab::AppJsonLogger).to receive(:info).with(
message: 'Found blob over global limit',
- blob_sizes: [mock_blob_size],
- blob_details: { mock_blob_id => { "size" => mock_blob_size } }
+ blob_details: [{ "id" => mock_blob_id, "size" => mock_blob_size }]
)
expect do
subject.validate!
diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb
index 331a43b814f..9f001dd1941 100644
--- a/spec/lib/gitlab/checks/lfs_check_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::LfsCheck do
+RSpec.describe Gitlab::Checks::LfsCheck, feature_category: :source_code_management do
include_context 'changes access checks context'
let(:blob_object) { project.repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') }
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
index abad2bfa905..0aecf26f42f 100644
--- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::LfsIntegrity do
+RSpec.describe Gitlab::Checks::LfsIntegrity, feature_category: :source_code_management do
include ProjectForksHelper
let!(:time_left) { 50 }
diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
index 5397aea90a9..85fe669b8cf 100644
--- a/spec/lib/gitlab/checks/matching_merge_request_spec.rb
+++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::MatchingMergeRequest do
+RSpec.describe Gitlab::Checks::MatchingMergeRequest, feature_category: :source_code_management do
describe '#match?' do
let_it_be(:newrev) { '012345678' }
let_it_be(:target_branch) { 'feature' }
diff --git a/spec/lib/gitlab/checks/project_created_spec.rb b/spec/lib/gitlab/checks/project_created_spec.rb
index 6a2e4201030..74961a61892 100644
--- a/spec/lib/gitlab/checks/project_created_spec.rb
+++ b/spec/lib/gitlab/checks/project_created_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state, feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/checks/push_check_spec.rb b/spec/lib/gitlab/checks/push_check_spec.rb
index 262438256b4..f61e4c39715 100644
--- a/spec/lib/gitlab/checks/push_check_spec.rb
+++ b/spec/lib/gitlab/checks/push_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::PushCheck do
+RSpec.describe Gitlab::Checks::PushCheck, feature_category: :source_code_management do
include_context 'change access checks context'
describe '#validate!' do
diff --git a/spec/lib/gitlab/checks/push_file_count_check_spec.rb b/spec/lib/gitlab/checks/push_file_count_check_spec.rb
index e05070e8f35..fef45588979 100644
--- a/spec/lib/gitlab/checks/push_file_count_check_spec.rb
+++ b/spec/lib/gitlab/checks/push_file_count_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::PushFileCountCheck do
+RSpec.describe Gitlab::Checks::PushFileCountCheck, feature_category: :source_code_management do
let(:snippet) { create(:personal_snippet, :repository) }
let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } }
let(:timeout) { Gitlab::GitAccess::INTERNAL_TIMEOUT }
diff --git a/spec/lib/gitlab/checks/single_change_access_spec.rb b/spec/lib/gitlab/checks/single_change_access_spec.rb
index 8d9f96dd2b4..9ae3e4b246d 100644
--- a/spec/lib/gitlab/checks/single_change_access_spec.rb
+++ b/spec/lib/gitlab/checks/single_change_access_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::SingleChangeAccess do
+RSpec.describe Gitlab::Checks::SingleChangeAccess, feature_category: :source_code_management do
describe '#validate!' do
include_context 'change access checks context'
diff --git a/spec/lib/gitlab/checks/snippet_check_spec.rb b/spec/lib/gitlab/checks/snippet_check_spec.rb
index c43b65d09c5..7763cc82681 100644
--- a/spec/lib/gitlab/checks/snippet_check_spec.rb
+++ b/spec/lib/gitlab/checks/snippet_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::SnippetCheck do
+RSpec.describe Gitlab::Checks::SnippetCheck, feature_category: :source_code_management do
include_context 'change access checks context'
let_it_be(:snippet) { create(:personal_snippet, :repository) }
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
index 2b1fbc7e797..15c6b906689 100644
--- a/spec/lib/gitlab/checks/tag_check_spec.rb
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -11,126 +11,138 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
it 'raises an error when user does not have access' do
allow(user_access).to receive(:can_do_action?).with(:admin_tag).and_return(false)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to change existing tags on this project.')
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to change existing tags on this project.'
+ )
end
- context "prohibited tags check" do
+ describe "prohibited tags check" do
it 'prohibits tags name that include refs/heads at the head' do
- allow(subject).to receive(:tag_name).and_return("refs/heads/foo")
+ allow(change_check).to receive(:tag_name).and_return("refs/heads/foo")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a tag with a prohibited pattern.")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "You cannot create a tag with a prohibited pattern."
+ )
end
it "prohibits tag names that include refs/tags/ at the head" do
- allow(subject).to receive(:tag_name).and_return("refs/tags/foo")
+ allow(change_check).to receive(:tag_name).and_return("refs/tags/foo")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a tag with a prohibited pattern.")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "You cannot create a tag with a prohibited pattern."
+ )
end
it "doesn't prohibit a nested refs/tags/ string in a tag name" do
- allow(subject).to receive(:tag_name).and_return("fix-for-refs/tags/foo")
-
- expect { subject.validate! }.not_to raise_error
- end
+ allow(change_check).to receive(:tag_name).and_return("fix-for-refs/tags/foo")
- context "deleting a refs/tags headed tag" do
- let(:newrev) { "0000000000000000000000000000000000000000" }
- let(:ref) { "refs/tags/refs/tags/267208abfe40e546f5e847444276f7d43a39503e" }
-
- it "doesn't prohibit the deletion of a refs/tags/ tag name" do
- expect { subject.validate! }.not_to raise_error
- end
+ expect { change_check.validate! }.not_to raise_error
end
it "prohibits tag names that include characters incompatible with UTF-8" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "Tag names must be valid when converted to UTF-8 encoding")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "Tag names must be valid when converted to UTF-8 encoding"
+ )
end
it "doesn't prohibit UTF-8 compatible characters" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
context "when prohibited_tag_name_encoding_check feature flag is disabled" do
before do
stub_feature_flags(prohibited_tag_name_encoding_check: false)
- allow(subject).to receive(:validate_tag_name_not_sha_like!)
+ allow(change_check).to receive(:validate_tag_name_not_sha_like!)
end
it "doesn't prohibit tag names that include characters incompatible with UTF-8" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
it "doesn't prohibit UTF-8 compatible characters" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+
+ expect { change_check.validate! }.not_to raise_error
+ end
+ end
- expect { subject.validate! }.not_to raise_error
+ describe "deleting a refs/tags headed tag" do
+ let(:newrev) { "0000000000000000000000000000000000000000" }
+ let(:ref) { "refs/tags/refs/tags/267208abfe40e546f5e847444276f7d43a39503e" }
+
+ it "doesn't prohibit the deletion of a refs/tags/ tag name" do
+ expect { change_check.validate! }.not_to raise_error
end
end
it "forbids SHA-1 values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("267208abfe40e546f5e847444276f7d43a39503e")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "forbids SHA-256 values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "forbids '{SHA-1}{+anything}' values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("267208abfe40e546f5e847444276f7d43a39503e-")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "forbids '{SHA-256}{+anything} values" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175-")
- expect { subject.validate! }.to raise_error(
+ expect { change_check.validate! }.to raise_error(
Gitlab::GitAccess::ForbiddenError,
"You cannot create a tag with a SHA-1 or SHA-256 tag name."
)
end
it "allows SHA-1 values to be appended to the tag name" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("fix-267208abfe40e546f5e847444276f7d43a39503e")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
it "allows SHA-256 values to be appended to the tag name" do
- allow(subject)
+ allow(change_check)
.to receive(:tag_name)
.and_return("fix-09b9fd3ea68e9b95a51b693a29568c898e27d1476bbd83c825664f18467fc175")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
end
@@ -142,31 +154,36 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
project.add_maintainer(user)
end
- context 'deletion' do
+ describe 'deleting a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
- context 'via web interface' do
+ context 'when deleting via web interface' do
let(:protocol) { 'web' }
it 'is allowed' do
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
end
- context 'via SSH' do
+ context 'when deleting via SSH' do
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /only delete.*web interface/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You can only delete protected tags using the web interface.'
+ )
end
end
end
- context 'update' do
+ describe 'updating a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot be updated/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError, 'Protected tags cannot be updated.'
+ )
end
end
end
@@ -176,37 +193,47 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
project.add_developer(user)
end
- context 'deletion' do
+ describe 'deleting a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /not allowed to delete/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to delete protected tags from this project. ' \
+ 'Only a project maintainer or owner can delete a protected tag.'
+ )
end
end
end
- context 'creation' do
+ describe 'creating a tag' do
let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:ref) { 'refs/tags/v9.1.0' }
it 'prevents creation below access level' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /allowed to create this tag as it is protected/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to create this tag as it is protected.'
+ )
end
context 'when user has access' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, project: project, name: 'v*') }
it 'allows tag creation' do
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
context 'when tag name is the same as default branch' do
let(:ref) { "refs/tags/#{project.default_branch}" }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot use default branch name to create a tag/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You cannot use default branch name to create a tag'
+ )
end
end
end
diff --git a/spec/lib/gitlab/checks/timed_logger_spec.rb b/spec/lib/gitlab/checks/timed_logger_spec.rb
index 261fdd6c002..e5c76afff3c 100644
--- a/spec/lib/gitlab/checks/timed_logger_spec.rb
+++ b/spec/lib/gitlab/checks/timed_logger_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::TimedLogger do
+RSpec.describe Gitlab::Checks::TimedLogger, feature_category: :source_code_management do
let!(:timeout) { 50.seconds }
let!(:start) { Time.now }
let!(:ref) { "bar" }
diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb
index 4895077a731..f8c0d69be2e 100644
--- a/spec/lib/gitlab/ci/build/image_spec.rb
+++ b/spec/lib/gitlab/ci/build/image_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined in job' do
let(:image_name) { 'image:1.0' }
- let(:job) { create(:ci_build, options: { image: image_name } ) }
+ let(:job) { create(:ci_build, options: { image: image_name }) }
context 'when image is defined as string' do
it 'fabricates an object of the proper class' do
@@ -29,12 +29,14 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined as hash' do
let(:entrypoint) { '/bin/sh' }
let(:pull_policy) { %w[always if-not-present] }
+ let(:executor_opts) { { docker: { platform: 'arm64' } } }
let(:job) do
create(:ci_build, options: { image: { name: image_name,
entrypoint: entrypoint,
ports: [80],
- pull_policy: pull_policy } } )
+ executor_opts: executor_opts,
+ pull_policy: pull_policy } })
end
it 'fabricates an object of the proper class' do
@@ -44,6 +46,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
it 'populates fabricated object with the proper attributes' do
expect(subject.name).to eq(image_name)
expect(subject.entrypoint).to eq(entrypoint)
+ expect(subject.executor_opts).to eq(executor_opts)
expect(subject.pull_policy).to eq(pull_policy)
end
@@ -98,11 +101,12 @@ RSpec.describe Gitlab::Ci::Build::Image do
let(:service_entrypoint) { '/bin/sh' }
let(:service_alias) { 'db' }
let(:service_command) { 'sleep 30' }
+ let(:executor_opts) { { docker: { platform: 'amd64' } } }
let(:pull_policy) { %w[always if-not-present] }
let(:job) do
create(:ci_build, options: { services: [{ name: service_image_name, entrypoint: service_entrypoint,
alias: service_alias, command: service_command, ports: [80],
- pull_policy: pull_policy }] })
+ executor_opts: executor_opts, pull_policy: pull_policy }] })
end
it 'fabricates an non-empty array of objects' do
@@ -116,6 +120,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
expect(subject.first.entrypoint).to eq(service_entrypoint)
expect(subject.first.alias).to eq(service_alias)
expect(subject.first.command).to eq(service_command)
+ expect(subject.first.executor_opts).to eq(executor_opts)
expect(subject.first.pull_policy).to eq(pull_policy)
port = subject.first.ports.first
diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb
index 4ba963b54b5..b9b4c3f7c69 100644
--- a/spec/lib/gitlab/ci/components/instance_path_spec.rb
+++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb
@@ -42,48 +42,86 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
end
- context 'when the component is simple (single file template)' do
- it 'fetches the component content', :aggregate_failures do
+ shared_examples 'does not find the component' do
+ it 'returns nil' do
result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine_1')
- expect(result.path).to eq('templates/secret-detection.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
+ expect(result).to be_nil
+ end
+ end
+
+ shared_examples 'finds the component' do
+ shared_examples 'fetches the component content' do
+ it 'fetches the component content', :aggregate_failures do
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq(file_content)
+ expect(result.path).to eq(file_path)
+ expect(path.host).to eq(current_host)
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(project.commit('master').id)
+ end
+ end
+
+ it_behaves_like 'fetches the component content'
+
+ context 'when feature flag ci_redirect_component_project is disabled' do
+ before do
+ stub_feature_flags(ci_redirect_component_project: false)
+ end
+
+ it_behaves_like 'fetches the component content'
+ end
+
+ context 'when the there is a redirect set for the project' do
+ let!(:redirect_route) { project.redirect_routes.create!(path: 'another-group/new-project') }
+ let(:project_path) { redirect_route.path }
+
+ it_behaves_like 'fetches the component content'
+
+ context 'when feature flag ci_redirect_component_project is disabled' do
+ before do
+ stub_feature_flags(ci_redirect_component_project: false)
+ end
+
+ it_behaves_like 'does not find the component'
+ end
+ end
+ end
+
+ context 'when the component is simple (single file template)' do
+ it_behaves_like 'finds the component' do
+ let(:file_path) { 'templates/secret-detection.yml' }
+ let(:file_content) { 'image: alpine_1' }
end
end
context 'when the component is complex (directory-based template)' do
let(:address) { "acme.com/#{project_path}/dast@#{version}" }
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine_2')
- expect(result.path).to eq('templates/dast/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
+ it_behaves_like 'finds the component' do
+ let(:file_path) { 'templates/dast/template.yml' }
+ let(:file_content) { 'image: alpine_2' }
end
context 'when there is an invalid nested component folder' do
let(:address) { "acme.com/#{project_path}/dast/another-folder@#{version}" }
- it 'returns nil' do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to be_nil
- end
+ it_behaves_like 'does not find the component'
end
context 'when there is an invalid nested component path' do
let(:address) { "acme.com/#{project_path}/dast/another-template@#{version}" }
- it 'returns nil' do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to be_nil
- end
+ it_behaves_like 'does not find the component'
end
end
+ context "when the project path starts with '/'" do
+ let(:project_path) { "/#{project.full_path}" }
+
+ it_behaves_like 'does not find the component'
+ end
+
+ # TODO: remove when deleting the feature flag `ci_redirect_component_project`
shared_examples 'prevents infinite loop' do |prefix|
context "when the project path starts with '#{prefix}'" do
let(:project_path) { "#{prefix}#{project.full_path}" }
@@ -127,7 +165,7 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
released_at: Time.zone.now)
end
- it 'fetches the component content', :aggregate_failures do
+ it 'returns the component content of the latest project release', :aggregate_failures do
result = path.fetch_content!(current_user: user)
expect(result.content).to eq('image: alpine_2')
expect(result.path).to eq('templates/secret-detection.yml')
@@ -135,6 +173,25 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
expect(path.project).to eq(project)
expect(path.sha).to eq(latest_sha)
end
+
+ context 'when the project is a catalog resource' do
+ let_it_be(:resource) { create(:ci_catalog_resource, project: project) }
+
+ before do
+ project.releases.each do |release|
+ create(:ci_catalog_resource_version, catalog_resource: resource, release: release)
+ end
+ end
+
+ it 'returns the component content of the latest catalog resource version', :aggregate_failures do
+ result = path.fetch_content!(current_user: user)
+ expect(result.content).to eq('image: alpine_2')
+ expect(result.path).to eq('templates/secret-detection.yml')
+ expect(path.host).to eq(current_host)
+ expect(path.project).to eq(project)
+ expect(path.sha).to eq(latest_sha)
+ end
+ end
end
context 'when version does not exist' do
@@ -162,88 +219,5 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
end
end
end
-
- # All the following tests are for deprecated code and will be removed
- # in https://gitlab.com/gitlab-org/gitlab/-/issues/415855
- context 'when the project does not contain a templates directory' do
- let(:project_path) { project.full_path }
- let(:address) { "acme.com/#{project_path}/component@#{version}" }
-
- let_it_be(:project) do
- create(
- :project, :custom_repo,
- files: {
- 'component/template.yml' => 'image: alpine'
- }
- )
- end
-
- before do
- project.add_developer(user)
- end
-
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine')
- expect(result.path).to eq('component/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
- end
-
- context 'when project path is nested under a subgroup' do
- let_it_be(:group) { create(:group, :nested) }
- let_it_be(:project) do
- create(
- :project, :custom_repo,
- files: {
- 'component/template.yml' => 'image: alpine'
- },
- group: group
- )
- end
-
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine')
- expect(result.path).to eq('component/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
- end
- end
-
- context 'when current GitLab instance is installed on a relative URL' do
- let(:address) { "acme.com/gitlab/#{project_path}/component@#{version}" }
- let(:current_host) { 'acme.com/gitlab/' }
-
- it 'fetches the component content', :aggregate_failures do
- result = path.fetch_content!(current_user: user)
- expect(result.content).to eq('image: alpine')
- expect(result.path).to eq('component/template.yml')
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to eq(project.commit('master').id)
- end
- end
-
- context 'when version does not exist' do
- let(:version) { 'non-existent' }
-
- it 'returns nil', :aggregate_failures do
- expect(path.fetch_content!(current_user: user)).to be_nil
- expect(path.host).to eq(current_host)
- expect(path.project).to eq(project)
- expect(path.sha).to be_nil
- end
- end
-
- context 'when user does not have permissions' do
- it 'raises an error when fetching the content' do
- expect { path.fetch_content!(current_user: build(:user)) }
- .to raise_error(Gitlab::Access::AccessDeniedError)
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
new file mode 100644
index 00000000000..bdd66cc00a1
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::AutoCancel, feature_category: :pipeline_composition do
+ subject(:config) { described_class.new(config_hash) }
+
+ context 'with on_new_commit' do
+ let(:config_hash) do
+ { on_new_commit: 'interruptible' }
+ end
+
+ it { is_expected.to be_valid }
+
+ it 'returns value correctly' do
+ expect(config.value).to eq(config_hash)
+ end
+
+ context 'when on_new_commit is invalid' do
+ let(:config_hash) do
+ { on_new_commit: 'invalid' }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(config.errors)
+ .to include('auto cancel on new commit must be one of: conservative, interruptible, disabled')
+ end
+ end
+ end
+
+ context 'with on_job_failure' do
+ ['all', 'none', nil].each do |value|
+ context 'when the `on_job_failure` value is valid' do
+ let(:config_hash) { { on_job_failure: value } }
+
+ it { is_expected.to be_valid }
+
+ it 'returns value correctly' do
+ expect(config.value).to eq(on_job_failure: value)
+ end
+ end
+ end
+
+ context 'when on_job_failure is invalid' do
+ let(:config_hash) do
+ { on_job_failure: 'invalid' }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(config.errors)
+ .to include('auto cancel on job failure must be one of: none, all')
+ end
+ end
+ end
+
+ context 'with invalid key' do
+ let(:config_hash) do
+ { invalid: 'interruptible' }
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(config.errors)
+ .to include('auto cancel config contains unknown keys: invalid')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 6e6b9d949c5..35f2a99ee87 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
+RSpec.describe Gitlab::Ci::Config::Entry::Bridge, feature_category: :continuous_integration do
subject(:entry) { described_class.new(config, name: :my_bridge) }
it_behaves_like 'with inheritable CI config' do
+ let(:config) { { trigger: 'some/project' } }
let(:inheritable_key) { 'default' }
let(:inheritable_class) { Gitlab::Ci::Config::Entry::Default }
@@ -13,9 +14,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
# that we know that we don't want to inherit
# as they do not have sense in context of Bridge
let(:ignored_inheritable_columns) do
- %i[before_script after_script hooks image services cache interruptible timeout
+ %i[before_script after_script hooks image services cache timeout
retry tags artifacts id_tokens]
end
+
+ before do
+ allow(entry).to receive_message_chain(:inherit_entry, :default_entry, :inherit?).and_return(true)
+ end
end
describe '.matching?' do
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index 17c45ec4c2c..99a6e25b313 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -42,6 +42,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
+ describe '#executor_opts' do
+ it "returns nil" do
+ expect(entry.executor_opts).to be_nil
+ end
+ end
+
describe '#ports' do
it "returns image's ports" do
expect(entry.ports).to be_nil
@@ -88,6 +94,54 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
+ context 'when configuration specifies docker' do
+ let(:config) { { name: 'image:1.0', docker: {} } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: {}
+ }
+ )
+ end
+ end
+
+ context "when docker specifies an option" do
+ let(:config) { { name: 'image:1.0', docker: { platform: 'amd64' } } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: { platform: 'amd64' }
+ }
+ )
+ end
+ end
+ end
+
+ context "when docker specifies an invalid option" do
+ let(:config) { { name: 'image:1.0', docker: { platform: 1 } } }
+
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{image executor opts '/docker/platform' must be a valid 'string'}
+ end
+ end
+ end
+
context 'when configuration has ports' do
let(:ports) { [{ number: 80, protocol: 'http', name: 'foobar' }] }
let(:config) { { name: 'image:1.0', entrypoint: %w[/bin/sh run], ports: ports } }
@@ -146,7 +200,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
describe '#errors' do
it 'saves errors' do
expect(entry.errors.first)
- .to match /config should be a hash or a string/
+ .to match(/config should be a hash or a string/)
end
end
@@ -163,7 +217,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
describe '#errors' do
it 'saves errors' do
expect(entry.errors.first)
- .to match /config contains unknown keys: non_existing/
+ .to match(/config contains unknown keys: non_existing/)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/includes_spec.rb b/spec/lib/gitlab/ci/config/entry/includes_spec.rb
index f1f28c24e70..54c02868010 100644
--- a/spec/lib/gitlab/ci/config/entry/includes_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/includes_spec.rb
@@ -13,4 +13,18 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Includes, feature_category: :pipelin
2.times { expect { described_class.new(config) }.not_to change { described_class.aspects.count } }
end
end
+
+ describe 'validations' do
+ let(:config) { [1, 2] }
+
+ let(:includes_entry) { described_class.new(config, max_size: 1) }
+
+ it 'returns invalid' do
+ expect(includes_entry).not_to be_valid
+ end
+
+ it 'returns the appropriate error' do
+ expect(includes_entry.errors).to include('includes config is too long (maximum is 1)')
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb b/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
index 7cd9b0acb99..c0d21385ce6 100644
--- a/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/inherit/default_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Inherit::Default do
false | false
%w[image] | true
%w[before_script] | false
+ '123' | false
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 24d3cac6616..073d8feaadd 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -789,7 +789,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
hooks: { pre_get_sources_script: 'echo hello' } }
end
- it 'returns correct value' do
+ it 'returns correct values' do
expect(entry.value).to eq(
name: :rspec,
before_script: %w[ls pwd],
@@ -806,6 +806,93 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo
)
end
end
+
+ context 'with retry present in the config' do
+ let(:config) do
+ {
+ script: 'rspec',
+ retry: { max: 1, when: "always" }
+ }
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ retry: { max: 1, when: %w[always] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+
+ context 'when ci_retry_on_exit_codes feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_retry_on_exit_codes: false)
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ retry: { max: 1, when: %w[always] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+ end
+
+ context 'with exit_codes present' do
+ let(:config) do
+ {
+ script: 'rspec',
+ retry: { max: 1, when: "always", exit_codes: 255 }
+ }
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ retry: { max: 1, when: %w[always], exit_codes: [255] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+
+ context 'when ci_retry_on_exit_codes feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_retry_on_exit_codes: false)
+ end
+
+ it 'returns correct values' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ script: %w[rspec],
+ stage: 'test',
+ ignore: false,
+ # Shouldn't include exit_codes
+ retry: { max: 1, when: %w[always] },
+ only: { refs: %w[branches tags] },
+ job_variables: {},
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ end
+ end
+ end
+ end
end
context 'when job is using tags' do
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index 44e2fdbac37..84a8fd827cb 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -217,6 +217,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable, feature_category: :pipeli
end
end
end
+
+ context 'when interruptible is not a boolean' do
+ let(:config) { { interruptible: 123 } }
+
+ it 'returns error about wrong value type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include "interruptible config should be a boolean value"
+ end
+ end
end
describe '#relevant?' do
@@ -462,6 +471,28 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable, feature_category: :pipeli
end
end
end
+
+ context 'with interruptible' do
+ context 'when interruptible is not defined' do
+ let(:config) { { script: 'ls' } }
+
+ it 'sets interruptible to nil' do
+ entry.compose!(deps)
+
+ expect(entry.value[:interruptible]).to be_nil
+ end
+ end
+
+ context 'when interruptible is defined' do
+ let(:config) { { script: 'ls', interruptible: true } }
+
+ it 'sets interruptible to the value' do
+ entry.compose!(deps)
+
+ expect(entry.value[:interruptible]).to eq(true)
+ end
+ end
+ end
end
context 'when composed' do
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index d610c3ce2f6..a6675229c62 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -49,6 +49,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports, feature_category: :pipeline_c
:accessibility | 'gl-accessibility.json'
:cyclonedx | 'gl-sbom.cdx.zip'
:annotations | 'gl-annotations.json'
+ :repository_xray | 'gl-repository-xray.json'
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/retry_spec.rb b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
index 84ef5344a8b..e01b50c5fbd 100644
--- a/spec/lib/gitlab/ci/config/entry/retry_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
@@ -11,8 +11,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
shared_context 'when retry value is a hash', :hash do
- let(:config) { { max: max, when: public_send(:when) }.compact }
+ let(:config) { { max: max, when: public_send(:when), exit_codes: public_send(:exit_codes) }.compact }
let(:when) {}
+ let(:exit_codes) {}
let(:max) {}
end
@@ -43,6 +44,44 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
expect(value).to eq(when: %w[unknown_failure runner_system_failure])
end
end
+
+ context 'and `exit_codes` is an integer' do
+ let(:exit_codes) { 255 }
+
+ it 'returns an array of exit_codes' do
+ expect(value).to eq(exit_codes: [255])
+ end
+ end
+
+ context 'and `exit_codes` is an array' do
+ let(:exit_codes) { [255, 142] }
+
+ it 'returns an array of exit_codes' do
+ expect(value).to eq(exit_codes: [255, 142])
+ end
+ end
+ end
+
+ context 'when ci_retry_on_exit_codes feature flag is disabled', :hash do
+ before do
+ stub_feature_flags(ci_retry_on_exit_codes: false)
+ end
+
+ context 'when `exit_codes` is an integer' do
+ let(:exit_codes) { 255 }
+
+ it 'deletes the attribute exit_codes' do
+ expect(value).to eq({})
+ end
+ end
+
+ context 'when `exit_codes` is an array' do
+ let(:exit_codes) { [255, 137] }
+
+ it 'deletes the attribute exit_codes' do
+ expect(value).to eq({})
+ end
+ end
end
end
@@ -65,6 +104,22 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
end
+ context 'with numeric exit_codes' do
+ let(:exit_codes) { 255 }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'with array values exit_codes' do
+ let(:exit_codes) { [255, 142] }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
context 'with string when' do
let(:when) { 'unknown_failure' }
@@ -202,7 +257,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
end
- context 'iwth max too high' do
+ context 'with max too high' do
let(:max) { 10 }
it 'returns error about value too high' do
@@ -211,6 +266,33 @@ RSpec.describe Gitlab::Ci::Config::Entry::Retry do
end
end
+ context 'with exit_codes in wrong format' do
+ let(:exit_codes) { true }
+
+ it 'adds a validation error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'retry exit codes should be an array of integers or an integer'
+ end
+ end
+
+ context 'with exit_codes in wrong array format' do
+ let(:exit_codes) { ['string 1', 'string 2'] }
+
+ it 'adds a validation error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'retry exit codes should be an array of integers or an integer'
+ end
+ end
+
+ context 'with exit_codes in wrong mixed array format' do
+ let(:exit_codes) { [255, '155'] }
+
+ it 'adds a validation error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'retry exit codes should be an array of integers or an integer'
+ end
+ end
+
context 'with when in wrong format' do
let(:when) { true }
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 1f935bebed5..82747e7b521 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -47,11 +47,23 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
expect(entry.ports).to be_nil
end
end
+
+ describe '#executor_opts' do
+ it "returns service's executor_opts configuration" do
+ expect(entry.executor_opts).to be_nil
+ end
+ end
end
context 'when configuration is a hash' do
let(:config) do
- { name: 'postgresql:9.5', alias: 'db', command: %w[cmd run], entrypoint: %w[/bin/sh run] }
+ {
+ name: 'postgresql:9.5',
+ alias: 'db',
+ command: %w[cmd run],
+ entrypoint: %w[/bin/sh run],
+ variables: { 'MY_VAR' => 'variable' }
+ }
end
describe '#valid?' do
@@ -141,6 +153,51 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
end
end
+ context 'when configuration has docker options' do
+ let(:config) { { name: 'postgresql:9.5', docker: { platform: 'amd64' } } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5',
+ executor_opts: {
+ docker: { platform: 'amd64' }
+ }
+ )
+ end
+ end
+ end
+
+ context 'when docker options have an invalid property' do
+ let(:config) { { name: 'postgresql:9.5', docker: { invalid: 'option' } } }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{service executor opts '/docker/invalid' must be a valid 'schema'}
+ end
+ end
+ end
+
+ context 'when docker options platform is not string' do
+ let(:config) { { name: 'postgresql:9.5', docker: { platform: 123 } } }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{service executor opts '/docker/platform' must be a valid 'string'}
+ end
+ end
+ end
+
context 'when configuration has pull_policy' do
let(:config) { { name: 'postgresql:9.5', pull_policy: 'if-not-present' } }
diff --git a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
index 97ac199f47d..d3ce3ffe641 100644
--- a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
@@ -2,13 +2,12 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
- let(:factory) { Gitlab::Config::Entry::Factory.new(described_class).value(rules_hash) }
- let(:config) { factory.create! }
+RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_composition do
+ subject(:config) { described_class.new(workflow_hash) }
describe 'validations' do
context 'when work config value is a string' do
- let(:rules_hash) { 'build' }
+ let(:workflow_hash) { 'build' }
describe '#valid?' do
it 'is invalid' do
@@ -22,13 +21,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
describe '#value' do
it 'returns the invalid configuration' do
- expect(config.value).to eq(rules_hash)
+ expect(config.value).to eq(workflow_hash)
end
end
end
context 'when work config value is a hash' do
- let(:rules_hash) { { rules: [{ if: '$VAR' }] } }
+ let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
describe '#valid?' do
it 'is valid' do
@@ -42,12 +41,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
describe '#value' do
it 'returns the config' do
- expect(config.value).to eq(rules_hash)
+ expect(config.value).to eq(workflow_hash)
end
end
context 'with an invalid key' do
- let(:rules_hash) { { trash: [{ if: '$VAR' }] } }
+ let(:workflow_hash) { { trash: [{ if: '$VAR' }] } }
describe '#valid?' do
it 'is invalid' do
@@ -61,64 +60,79 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
describe '#value' do
it 'returns the invalid configuration' do
- expect(config.value).to eq(rules_hash)
+ expect(config.value).to eq(workflow_hash)
end
end
end
+ end
+ end
- context 'with workflow name' do
- let(:factory) { Gitlab::Config::Entry::Factory.new(described_class).value(workflow_hash) }
+ describe '.default' do
+ it 'is nil' do
+ expect(described_class.default).to be_nil
+ end
+ end
- context 'with a blank name' do
- let(:workflow_hash) do
- { name: '' }
- end
+ context 'with workflow name' do
+ context 'with a blank name' do
+ let(:workflow_hash) do
+ { name: '' }
+ end
- it 'is invalid' do
- expect(config).not_to be_valid
- end
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
- it 'returns error about invalid name' do
- expect(config.errors).to include('workflow name is too short (minimum is 1 character)')
- end
- end
+ it 'returns error about invalid name' do
+ expect(config.errors).to include('workflow name is too short (minimum is 1 character)')
+ end
+ end
- context 'with too long name' do
- let(:workflow_hash) do
- { name: 'a' * 256 }
- end
+ context 'with too long name' do
+ let(:workflow_hash) do
+ { name: 'a' * 256 }
+ end
- it 'is invalid' do
- expect(config).not_to be_valid
- end
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
- it 'returns error about invalid name' do
- expect(config.errors).to include('workflow name is too long (maximum is 255 characters)')
- end
- end
+ it 'returns error about invalid name' do
+ expect(config.errors).to include('workflow name is too long (maximum is 255 characters)')
+ end
+ end
- context 'when name is nil' do
- let(:workflow_hash) { { name: nil } }
+ context 'when name is nil' do
+ let(:workflow_hash) { { name: nil } }
- it 'is valid' do
- expect(config).to be_valid
- end
- end
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+ end
- context 'when name is not provided' do
- let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
+ context 'when name is not provided' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
- it 'is valid' do
- expect(config).to be_valid
- end
- end
+ it 'is valid' do
+ expect(config).to be_valid
end
end
end
- describe '.default' do
- it 'is nil' do
- expect(described_class.default).to be_nil
+ context 'with auto_cancel' do
+ let(:workflow_hash) do
+ {
+ auto_cancel: {
+ on_new_commit: 'interruptible',
+ on_job_failure: 'none'
+ }
+ }
+ end
+
+ it { is_expected.to be_valid }
+
+ it 'returns value correctly' do
+ expect(config.value).to eq(workflow_hash)
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 0643bf0c046..b961ee0d190 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -269,8 +269,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip
context_sha: sha,
type: :local,
location: 'lib/gitlab/ci/templates/existent-file.yml',
- blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
- raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
+ blob: "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/blob/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
+ raw: "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/raw/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
extra: {}
)
}
diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
index f8d3d1019f5..7293e640112 100644
--- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
@@ -75,7 +75,9 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
context 'with a timeout' do
before do
- allow(Gitlab::HTTP).to receive(:get).and_raise(Timeout::Error)
+ allow_next_instance_of(HTTParty::Request) do |instance|
+ allow(instance).to receive(:perform).and_raise(Timeout::Error)
+ end
end
it { is_expected.to be_falsy }
@@ -94,24 +96,33 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
end
- describe "#content" do
+ # When the FF ci_parallel_remote_includes is removed,
+ # convert this `shared_context` to `describe` and remove `rubocop:disable`.
+ shared_context "#content" do # rubocop:disable RSpec/ContextWording -- This is temporary until the FF is removed.
+ subject(:content) do
+ remote_file.preload_content
+ remote_file.content
+ end
+
context 'with a valid remote file' do
before do
stub_full_request(location).to_return(body: remote_file_content)
end
it 'returns the content of the file' do
- expect(remote_file.content).to eql(remote_file_content)
+ expect(content).to eql(remote_file_content)
end
end
context 'with a timeout' do
before do
- allow(Gitlab::HTTP).to receive(:get).and_raise(Timeout::Error)
+ allow_next_instance_of(HTTParty::Request) do |instance|
+ allow(instance).to receive(:perform).and_raise(Timeout::Error)
+ end
end
it 'is falsy' do
- expect(remote_file.content).to be_falsy
+ expect(content).to be_falsy
end
end
@@ -123,7 +134,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
it 'is nil' do
- expect(remote_file.content).to be_nil
+ expect(content).to be_nil
end
end
@@ -131,11 +142,21 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
let(:location) { 'http://localhost:8080' }
it 'is nil' do
- expect(remote_file.content).to be_nil
+ expect(content).to be_nil
end
end
end
+ it_behaves_like "#content"
+
+ context 'when the FF ci_parallel_remote_includes is disabled' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: false)
+ end
+
+ it_behaves_like "#content"
+ end
+
describe "#error_message" do
subject(:error_message) do
Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([remote_file])
@@ -234,13 +255,18 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
describe '#to_hash' do
+ subject(:to_hash) do
+ remote_file.preload_content
+ remote_file.to_hash
+ end
+
before do
stub_full_request(location).to_return(body: remote_file_content)
end
context 'with a valid remote file' do
it 'returns the content as a hash' do
- expect(remote_file.to_hash).to eql(
+ expect(to_hash).to eql(
before_script: ["apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs",
"ruby -v",
"which ruby",
@@ -260,7 +286,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
end
it 'returns the content as a hash' do
- expect(remote_file.to_hash).to eql(
+ expect(to_hash).to eql(
include: [
{ local: 'another-file.yml',
rules: [{ exists: ['Dockerfile'] }] }
@@ -293,7 +319,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
it 'returns the content as a hash' do
expect(remote_file).to be_valid
- expect(remote_file.to_hash).to eql(
+ expect(to_hash).to eql(
include: [
{ local: 'some-file.yml',
rules: [{ exists: ['Dockerfile'] }] }
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 5f28b45496f..d67b0ff8895 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -85,7 +85,13 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline
an_instance_of(Gitlab::Ci::Config::External::File::Remote))
end
- it_behaves_like 'logging config file fetch', 'config_file_fetch_remote_content_duration_s', 1
+ context 'when the FF ci_parallel_remote_includes is disabled' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: false)
+ end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_remote_content_duration_s', 1
+ end
end
context 'when the key is a remote file hash' do
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 68cdf56f198..4684495fa26 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -410,7 +410,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel
let(:other_project_files) do
{
- '/component-x/template.yml' => <<~YAML
+ '/templates/component-x/template.yml' => <<~YAML
component_x_job:
script: echo Component X
YAML
diff --git a/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb b/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
index 30036ee68ed..b0a514cb1e2 100644
--- a/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/inputs/base_input_spec.rb
@@ -4,8 +4,34 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Interpolation::Inputs::BaseInput, feature_category: :pipeline_composition do
describe '.matches?' do
- it 'is not implemented' do
- expect { described_class.matches?(double) }.to raise_error(NotImplementedError)
+ context 'when given is a hash' do
+ before do
+ stub_const('TestInput', Class.new(described_class))
+
+ TestInput.class_eval do
+ def self.type_name
+ 'test'
+ end
+ end
+ end
+
+ context 'when the spec type matches the input type' do
+ it 'returns true' do
+ expect(TestInput.matches?({ type: 'test' })).to be_truthy
+ end
+ end
+
+ context 'when the spec type does not match the input type' do
+ it 'returns false' do
+ expect(TestInput.matches?({ type: 'string' })).to be_falsey
+ end
+ end
+ end
+
+ context 'when not given a hash' do
+ it 'returns false' do
+ expect(described_class.matches?([])).to be_falsey
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb b/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb
new file mode 100644
index 00000000000..70858c0fff8
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb
@@ -0,0 +1,221 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::TextInterpolator, feature_category: :pipeline_composition do
+ let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(config: [header, content]) }
+
+ subject(:interpolator) { described_class.new(result, arguments, []) }
+
+ context 'when input data is valid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'correctly interpolates the config' do
+ interpolator.interpolate!
+
+ expect(interpolator).to be_interpolated
+ expect(interpolator).to be_valid
+ expect(interpolator.to_result).to eq("test: 'deploy gitlab.com'")
+ end
+ end
+
+ context 'when config has a syntax error' do
+ let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(error: 'Invalid configuration format') }
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'surfaces an error about invalid config' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('Invalid configuration format')
+ end
+ end
+
+ context 'when spec header is missing but inputs are specified' do
+ let(:header) { nil }
+ let(:content) { "test: 'echo'" }
+ let(:arguments) { { foo: 'bar' } }
+
+ it 'surfaces an error about invalid inputs' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq(
+ 'Given inputs not defined in the `spec` section of the included configuration file'
+ )
+ end
+ end
+
+ context 'when spec header is invalid' do
+ let(:header) do
+ { spec: { arguments: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'surfaces an error about invalid header' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('header:spec config contains unknown keys: arguments')
+ end
+ end
+
+ context 'when provided interpolation argument is invalid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: ['gitlab.com'] }
+ end
+
+ it 'returns an error about the invalid argument' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('`website` input: provided value is not a string')
+ end
+ end
+
+ context 'when interpolation block is invalid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.abc ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'returns an error about the invalid block' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('unknown interpolation key: `abc`')
+ end
+ end
+
+ context 'when multiple interpolation blocks are invalid' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.something.abc ]] $[[ inputs.cde ]] $[[ efg ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'stops execution after the first invalid block' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq('unknown interpolation key: `something`')
+ end
+ end
+
+ context 'when there are many invalid arguments' do
+ let(:header) do
+ { spec: { inputs: {
+ allow_failure: { type: 'boolean' },
+ image: nil,
+ parallel: { type: 'number' },
+ website: nil
+ } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]] $[[ inputs.parallel ]] $[[ inputs.allow_failure ]] $[[ inputs.image ]]'"
+ end
+
+ let(:arguments) do
+ { allow_failure: 'no', parallel: 'yes', website: 8 }
+ end
+
+ it 'reports a maximum of 3 errors in the error message' do
+ interpolator.interpolate!
+
+ expect(interpolator).not_to be_valid
+ expect(interpolator.error_message).to eq(
+ '`allow_failure` input: provided value is not a boolean, ' \
+ '`image` input: required value has not been provided, ' \
+ '`parallel` input: provided value is not a number'
+ )
+ expect(interpolator.errors).to contain_exactly(
+ '`allow_failure` input: provided value is not a boolean',
+ '`image` input: required value has not been provided',
+ '`parallel` input: provided value is not a number',
+ '`website` input: provided value is not a string'
+ )
+ end
+ end
+
+ describe '#to_result' do
+ context 'when interpolation is not used' do
+ let(:result) do
+ ::Gitlab::Ci::Config::Yaml::Result.new(config: content)
+ end
+
+ let(:content) do
+ "test: 'deploy production'"
+ end
+
+ let(:arguments) { nil }
+
+ it 'returns original content' do
+ interpolator.interpolate!
+
+ expect(interpolator.to_result).to eq(content)
+ end
+ end
+
+ context 'when interpolation is available' do
+ let(:header) do
+ { spec: { inputs: { website: nil } } }
+ end
+
+ let(:content) do
+ "test: 'deploy $[[ inputs.website ]]'"
+ end
+
+ let(:arguments) do
+ { website: 'gitlab.com' }
+ end
+
+ it 'correctly interpolates content' do
+ interpolator.interpolate!
+
+ expect(interpolator.to_result).to eq("test: 'deploy gitlab.com'")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb b/spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb
new file mode 100644
index 00000000000..a2f98fc0d5d
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/interpolation/text_template_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Interpolation::TextTemplate, feature_category: :pipeline_composition do
+ subject(:template) { described_class.new(config, ctx) }
+
+ let(:config) do
+ <<~CFG
+ test:
+ spec:
+ env: $[[ inputs.env ]]
+
+ $[[ inputs.key ]]:
+ name: $[[ inputs.key ]]
+ script: my-value
+ parallel: $[[ inputs.parallel ]]
+ CFG
+ end
+
+ let(:ctx) do
+ { inputs: { env: 'dev', key: 'abc', parallel: 6 } }
+ end
+
+ it 'interpolates the values properly' do
+ expect(template.interpolated).to eq <<~RESULT
+ test:
+ spec:
+ env: dev
+
+ abc:
+ name: abc
+ script: my-value
+ parallel: 6
+ RESULT
+ end
+
+ context 'when the config has an unknown interpolation key' do
+ let(:config) { '$[[ xxx.yyy ]]: abc' }
+
+ it 'does not interpolate the config' do
+ expect(template).not_to be_valid
+ expect(template.interpolated).to be_nil
+ expect(template.errors).to contain_exactly('unknown interpolation key: `xxx`')
+ end
+ end
+
+ context 'when template consists of nested arrays with hashes and values' do
+ let(:config) do
+ <<~CFG
+ test:
+ - a-$[[ inputs.key ]]-b
+ - c-$[[ inputs.key ]]-d:
+ d-$[[ inputs.key ]]-e
+ val: 1
+ CFG
+ end
+
+ it 'performs a valid interpolation' do
+ result = <<~RESULT
+ test:
+ - a-abc-b
+ - c-abc-d:
+ d-abc-e
+ val: 1
+ RESULT
+
+ expect(template).to be_valid
+ expect(template.interpolated).to eq result
+ end
+ end
+
+ context 'when template contains symbols that need interpolation' do
+ subject(:template) do
+ described_class.new("'$[[ inputs.key ]]': 'cde'", ctx)
+ end
+
+ it 'performs a valid interpolation' do
+ expect(template).to be_valid
+ expect(template.interpolated).to eq("'abc': 'cde'")
+ end
+ end
+
+ context 'when template is too large' do
+ before do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 1)
+ end
+
+ it 'returns an error' do
+ expect(template.interpolated).to be_nil
+ expect(template.errors).to contain_exactly('config too large')
+ end
+ end
+
+ context 'when there are too many interpolation blocks' do
+ before do
+ stub_const("#{described_class}::MAX_BLOCKS", 1)
+ end
+
+ it 'returns an error' do
+ expect(template.interpolated).to be_nil
+ expect(template.errors).to contain_exactly('too many interpolation blocks')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index fdf152b3584..76be65d91c4 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -43,6 +43,34 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_composition do
expect(config.to_hash).to eq hash
end
+ context 'when yml has stages' do
+ let(:yml) do
+ <<-EOS
+ image: image:1.0
+ stages:
+ - custom_stage
+ rspec:
+ script:
+ - gem install rspec
+ - rspec
+ EOS
+ end
+
+ specify do
+ expect(config.to_hash[:stages]).to eq(['.pre', 'custom_stage', '.post'])
+ end
+
+ context 'with inject_edge_stages option disabled' do
+ let(:config) do
+ described_class.new(yml, project: nil, pipeline: nil, sha: nil, user: nil, inject_edge_stages: false)
+ end
+
+ specify do
+ expect(config.to_hash[:stages]).to contain_exactly('custom_stage')
+ end
+ end
+ end
+
describe '#valid?' do
it 'is valid' do
expect(config).to be_valid
diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb
index c2ced10620b..1093e6331cd 100644
--- a/spec/lib/gitlab/ci/jwt_v2_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::JwtV2, feature_category: :continuous_integration do
+RSpec.describe Gitlab::Ci::JwtV2, feature_category: :secrets_management do
let(:namespace) { build_stubbed(:namespace) }
let(:project) { build_stubbed(:project, namespace: namespace) }
let(:user) do
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
index a331af9a9ac..9c8402faf77 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
@@ -33,35 +33,27 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
allow(SecureRandom).to receive(:uuid).and_return(uuid)
end
- context 'when report JSON is invalid' do
- let(:raw_report_data) { '{ ' }
+ context 'when report is invalid' do
+ context 'when report JSON is invalid' do
+ let(:raw_report_data) { '{ ' }
- it 'handles errors and adds them to the report' do
- expect(report).to receive(:add_error).with(a_string_including("Report JSON is invalid:"))
+ it 'handles errors and adds them to the report' do
+ expect(report).to receive(:add_error).with(a_string_including("Report JSON is invalid:"))
- expect { parse! }.not_to raise_error
+ expect { parse! }.not_to raise_error
+ end
end
- end
-
- context 'when report uses an unsupported spec version' do
- let(:report_data) { base_report_data.merge({ 'specVersion' => '1.3' }) }
-
- it 'reports unsupported version as an error' do
- expect(report).to receive(:add_error).with("Unsupported CycloneDX spec version. Must be one of: 1.4")
- parse!
- end
- end
+ context 'when report does not conform to the CycloneDX schema' do
+ let(:report_valid?) { false }
+ let(:validator_errors) { %w[error1 error2] }
- context 'when report does not conform to the CycloneDX schema' do
- let(:report_valid?) { false }
- let(:validator_errors) { %w[error1 error2] }
+ it 'reports all errors returned by the validator' do
+ expect(report).to receive(:add_error).with("error1")
+ expect(report).to receive(:add_error).with("error2")
- it 'reports all errors returned by the validator' do
- expect(report).to receive(:add_error).with("error1")
- expect(report).to receive(:add_error).with("error2")
-
- parse!
+ parse!
+ end
end
end
@@ -109,25 +101,26 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
it 'adds each component, ignoring unused attributes' do
expect(report).to receive(:add_component)
- .with(
- an_object_having_attributes(
- name: "activesupport",
- version: "5.1.4",
- component_type: "library",
- purl: an_object_having_attributes(type: "gem")
- )
- )
+ .with(
+ an_object_having_attributes(
+ name: "activesupport",
+ version: "5.1.4",
+ component_type: "library",
+ purl: an_object_having_attributes(type: "gem")
+ )
+ )
expect(report).to receive(:add_component)
- .with(
- an_object_having_attributes(
- name: "byebug",
- version: "10.0.0",
- component_type: "library",
- purl: an_object_having_attributes(type: "gem")
- )
- )
+ .with(
+ an_object_having_attributes(
+ name: "byebug",
+ version: "10.0.0",
+ component_type: "library",
+ purl: an_object_having_attributes(type: "gem")
+ )
+ )
expect(report).to receive(:add_component)
- .with(an_object_having_attributes(name: "minimal-component", version: nil, component_type: "library"))
+ .with(an_object_having_attributes(name: "minimal-component", version: nil,
+ component_type: "library"))
parse!
end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
index acb7c122bcd..9422290761d 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
@@ -4,160 +4,116 @@ require "spec_helper"
RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator,
feature_category: :dependency_management do
- # Reports should be valid or invalid according to the specification at
- # https://cyclonedx.org/docs/1.4/json/
-
- subject(:validator) { described_class.new(report_data) }
-
- let_it_be(:required_attributes) do
+ let(:report_data) do
{
"bomFormat" => "CycloneDX",
- "specVersion" => "1.4",
+ "specVersion" => spec_version,
"version" => 1
}
end
- context "with minimally valid report" do
- let_it_be(:report_data) { required_attributes }
-
- it { is_expected.to be_valid }
- end
-
- context "when report has components" do
- let(:report_data) { required_attributes.merge({ "components" => components }) }
-
- context "with minimally valid components" do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport"
- },
- {
- "type" => "library",
- "name" => "byebug"
- }
- ]
- end
+ subject(:validator) { described_class.new(report_data) }
- it { is_expected.to be_valid }
+ shared_examples 'a validator that performs the expected validations' do
+ let(:required_attributes) do
+ {
+ "bomFormat" => "CycloneDX",
+ "specVersion" => spec_version,
+ "version" => 1
+ }
end
- context "when components have versions" do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport",
- "version" => "5.1.4"
- },
- {
- "type" => "library",
- "name" => "byebug",
- "version" => "10.0.0"
- }
- ]
- end
+ context "with minimally valid report" do
+ let(:report_data) { required_attributes }
it { is_expected.to be_valid }
end
- context 'when components have licenses' do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport",
- "version" => "5.1.4",
- "licenses" => [
- { "license" => { "id" => "MIT" } }
- ]
- }
- ]
- end
+ context "when report has components" do
+ let(:report_data) { required_attributes.merge({ "components" => components }) }
- it { is_expected.to be_valid }
- end
-
- context 'when components have a signature' do
- let(:components) do
- [
- {
- "type" => "library",
- "name" => "activesupport",
- "version" => "5.1.4",
- "signature" => {
- "algorithm" => "ES256",
- "publicKey" => {
- "kty" => "EC",
- "crv" => "P-256",
- "x" => "6BKxpty8cI-exDzCkh-goU6dXq3MbcY0cd1LaAxiNrU",
- "y" => "mCbcvUzm44j3Lt2b5BPyQloQ91tf2D2V-gzeUxWaUdg"
- },
- "value" => "ybT1qz5zHNi4Ndc6y7Zhamuf51IqXkPkZwjH1XcC-KSuBiaQplTw6Jasf2MbCLg3CF7PAdnMO__WSLwvI5r2jA"
+ context "with minimally valid components" do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport"
+ },
+ {
+ "type" => "library",
+ "name" => "byebug"
}
- }
- ]
- end
-
- it { is_expected.to be_valid }
- end
+ ]
+ end
- context "when components are not valid" do
- let(:components) do
- [
- { "type" => "foo" },
- { "name" => "activesupport" }
- ]
+ it { is_expected.to be_valid }
end
- it { is_expected.not_to be_valid }
-
- it "outputs errors for each validation failure" do
- expect(validator.errors).to match_array(
+ context "when components have versions" do
+ let(:components) do
[
- "property '/components/0' is missing required keys: name",
- "property '/components/0/type' is not one of: [\"application\", \"framework\"," \
- " \"library\", \"container\", \"operating-system\", \"device\", \"firmware\", \"file\"]",
- "property '/components/1' is missing required keys: type"
- ])
- end
- end
- end
-
- context "when report has metadata" do
- let(:metadata) do
- {
- "timestamp" => "2022-02-23T08:02:39Z",
- "tools" => [{ "vendor" => "GitLab", "name" => "Gemnasium", "version" => "2.34.0" }],
- "authors" => [{ "name" => "GitLab", "email" => "support@gitlab.com" }]
- }
- end
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4"
+ },
+ {
+ "type" => "library",
+ "name" => "byebug",
+ "version" => "10.0.0"
+ }
+ ]
+ end
- let(:report_data) { required_attributes.merge({ "metadata" => metadata }) }
+ it { is_expected.to be_valid }
+ end
- it { is_expected.to be_valid }
+ context 'when components have licenses' do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4",
+ "licenses" => [
+ { "license" => { "id" => "MIT" } }
+ ]
+ }
+ ]
+ end
- context "when metadata has properties" do
- before do
- metadata.merge!({ "properties" => properties })
+ it { is_expected.to be_valid }
end
- context "when properties are valid" do
- let(:properties) do
+ context 'when components have a signature' do
+ let(:components) do
[
- { "name" => "gitlab:dependency_scanning:input_file", "value" => "Gemfile.lock" },
- { "name" => "gitlab:dependency_scanning:package_manager", "value" => "bundler" }
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4",
+ "signature" => {
+ "algorithm" => "ES256",
+ "publicKey" => {
+ "kty" => "EC",
+ "crv" => "P-256",
+ "x" => "6BKxpty8cI-exDzCkh-goU6dXq3MbcY0cd1LaAxiNrU",
+ "y" => "mCbcvUzm44j3Lt2b5BPyQloQ91tf2D2V-gzeUxWaUdg"
+ },
+ "value" => "ybT1qz5zHNi4Ndc6y7Zhamuf51IqXkPkZwjH1XcC-KSuBiaQplTw6Jasf2MbCLg3CF7PAdnMO__WSLwvI5r2jA"
+ }
+ }
]
end
it { is_expected.to be_valid }
end
- context "when properties are invalid" do
- let(:properties) do
+ context "when components are not valid" do
+ let(:components) do
[
- { "name" => ["gitlab:meta:schema_version"], "value" => 1 }
+ { "type" => "foo" },
+ { "name" => "activesupport" }
]
end
@@ -166,11 +122,75 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator,
it "outputs errors for each validation failure" do
expect(validator.errors).to match_array(
[
- "property '/metadata/properties/0/name' is not of type: string",
- "property '/metadata/properties/0/value' is not of type: string"
+ "property '/components/0' is missing required keys: name",
+ a_string_starting_with("property '/components/0/type' is not one of:"),
+ "property '/components/1' is missing required keys: type"
])
end
end
end
+
+ context "when report has metadata" do
+ let(:metadata) do
+ {
+ "timestamp" => "2022-02-23T08:02:39Z",
+ "tools" => [{ "vendor" => "GitLab", "name" => "Gemnasium", "version" => "2.34.0" }],
+ "authors" => [{ "name" => "GitLab", "email" => "support@gitlab.com" }]
+ }
+ end
+
+ let(:report_data) { required_attributes.merge({ "metadata" => metadata }) }
+
+ it { is_expected.to be_valid }
+
+ context "when metadata has properties" do
+ before do
+ metadata.merge!({ "properties" => properties })
+ end
+
+ context "when properties are valid" do
+ let(:properties) do
+ [
+ { "name" => "gitlab:dependency_scanning:input_file", "value" => "Gemfile.lock" },
+ { "name" => "gitlab:dependency_scanning:package_manager", "value" => "bundler" }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context "when properties are invalid" do
+ let(:properties) do
+ [
+ { "name" => ["gitlab:meta:schema_version"], "value" => 1 }
+ ]
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it "outputs errors for each validation failure" do
+ expect(validator.errors).to match_array(
+ [
+ "property '/metadata/properties/0/name' is not of type: string",
+ "property '/metadata/properties/0/value' is not of type: string"
+ ])
+ end
+ end
+ end
+ end
+ end
+
+ context 'when spec version is supported' do
+ where(:spec_version) { %w[1.4 1.5] }
+
+ with_them do
+ it_behaves_like 'a validator that performs the expected validations'
+ end
+ end
+
+ context 'when spec version is not supported' do
+ let(:spec_version) { '1.3' }
+
+ it { is_expected.not_to be_valid }
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 31bffcbeb2a..00f834fcf80 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines, feature_category: :continuous_integration do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) }
let_it_be(:step) { described_class.new(pipeline, command) }
@@ -17,5 +17,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines, feature_cate
subject
end
+
+ context 'with scheduled pipelines' do
+ before do
+ pipeline.source = :schedule
+ end
+
+ it 'enqueues LowUrgencyCancelRedundantPipelinesWorker' do
+ expect(Ci::LowUrgencyCancelRedundantPipelinesWorker)
+ .to receive(:perform_async).with(pipeline.id)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
index eb5a37f19f4..44ccb1eeae1 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
@@ -12,10 +12,13 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
end
let(:step) { described_class.new(pipeline, command) }
+ let(:ff_always_set_pipeline_failure_reason) { true }
describe '#perform!' do
context 'when pipeline has been skipped by workflow configuration' do
before do
+ stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
+
allow(step).to receive(:workflow_rules_result)
.and_return(
double(pass?: false, variables: {})
@@ -39,6 +42,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
it 'saves workflow_rules_result' do
expect(command.workflow_rules_result.variables).to eq({})
end
+
+ it 'sets the failure reason', :aggregate_failures do
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_filtered_by_workflow_rules
+ end
+
+ context 'when always_set_pipeline_failure_reason is disabled' do
+ let(:ff_always_set_pipeline_failure_reason) { false }
+
+ it 'does not set the failure reason', :aggregate_failures do
+ expect(pipeline).not_to be_failed
+ expect(pipeline.failure_reason).to be_blank
+ end
+ end
end
context 'when pipeline has not been skipped by workflow configuration' do
@@ -67,6 +84,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
it 'saves workflow_rules_result' do
expect(command.workflow_rules_result.variables).to eq({ 'VAR1' => 'val2', 'VAR2' => 3 })
end
+
+ it 'does not set a failure reason' do
+ expect(pipeline).not_to be_filtered_by_workflow_rules
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
index 96ada90b4e1..84c2fb6525e 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuous_integration do
let(:helper_class) do
Class.new do
include Gitlab::Ci::Pipeline::Chain::Helpers
@@ -38,14 +38,35 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers do
describe '.error' do
shared_examples 'error function' do
specify do
- expect(pipeline).to receive(:drop!).with(drop_reason).and_call_original
expect(pipeline).to receive(:add_error_message).with(message).and_call_original
- expect(pipeline).to receive(:ensure_project_iid!).twice.and_call_original
+
+ if command.save_incompleted
+ expect(pipeline).to receive(:ensure_project_iid!).twice.and_call_original
+ expect(pipeline).to receive(:drop!).with(drop_reason).and_call_original
+ end
subject.error(message, config_error: config_error, drop_reason: drop_reason)
expect(pipeline.yaml_errors).to eq(yaml_error)
expect(pipeline.errors[:base]).to include(message)
+ expect(pipeline.status).to eq 'failed'
+ expect(pipeline.failure_reason).to eq drop_reason.to_s
+ end
+
+ context 'when feature flag always_set_pipeline_failure_reason is false' do
+ before do
+ stub_feature_flags(always_set_pipeline_failure_reason: false)
+ end
+
+ specify do
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ if command.save_incompleted
+ expect(pipeline.failure_reason).to eq drop_reason.to_s
+ else
+ expect(pipeline.failure_reason).not_to be_present
+ end
+ end
end
end
@@ -79,6 +100,43 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers do
let(:yaml_error) { nil }
it_behaves_like "error function"
+
+ specify do
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ expect(pipeline).to be_persisted
+ end
+
+ context 'when the drop reason is not persistable' do
+ let(:drop_reason) { :filtered_by_rules }
+ let(:command) { double(project: nil) }
+
+ specify do
+ expect(command).to receive(:increment_pipeline_failure_reason_counter)
+
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ expect(pipeline).to be_failed
+ expect(pipeline.failure_reason).to eq drop_reason.to_s
+ expect(pipeline).not_to be_persisted
+ end
+ end
+
+ context 'when save_incompleted is false' do
+ let(:command) { double(save_incompleted: false, project: nil) }
+
+ before do
+ allow(command).to receive(:increment_pipeline_failure_reason_counter)
+ end
+
+ it_behaves_like "error function"
+
+ specify do
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ expect(pipeline).not_to be_persisted
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
index 00200b57b1e..732748d8c8b 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category: :pipeline_composition do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -43,16 +43,28 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
- context 'with pipeline name' do
- let(:config) do
- { workflow: { name: ' Pipeline name ' }, rspec: { script: 'rspec' } }
- end
-
+ shared_examples 'not breaking the chain' do
it 'does not break the chain' do
run_chain
expect(step.break?).to be false
end
+ end
+
+ shared_examples 'not saving pipeline metadata' do
+ it 'does not save pipeline metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata).to be_nil
+ end
+ end
+
+ context 'with pipeline name' do
+ let(:config) do
+ { workflow: { name: ' Pipeline name ' }, rspec: { script: 'rspec' } }
+ end
+
+ it_behaves_like 'not breaking the chain'
it 'builds pipeline_metadata' do
run_chain
@@ -67,22 +79,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
{ workflow: { name: ' ' }, rspec: { script: 'rspec' } }
end
- it 'strips whitespace from name' do
- run_chain
-
- expect(pipeline.pipeline_metadata).to be_nil
- end
+ it_behaves_like 'not saving pipeline metadata'
context 'with empty name after variable substitution' do
let(:config) do
{ workflow: { name: '$VAR1' }, rspec: { script: 'rspec' } }
end
- it 'does not save empty name' do
- run_chain
-
- expect(pipeline.pipeline_metadata).to be_nil
- end
+ it_behaves_like 'not saving pipeline metadata'
end
end
@@ -127,4 +131,140 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
end
end
end
+
+ context 'with auto_cancel' do
+ let(:on_new_commit) { 'interruptible' }
+ let(:on_job_failure) { 'all' }
+ let(:auto_cancel) { { on_new_commit: on_new_commit, on_job_failure: on_job_failure } }
+ let(:config) { { workflow: { auto_cancel: auto_cancel }, rspec: { script: 'rspec' } } }
+
+ it_behaves_like 'not breaking the chain'
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+
+ context 'with no auto_cancel' do
+ let(:config) do
+ { rspec: { script: 'rspec' } }
+ end
+
+ it_behaves_like 'not saving pipeline metadata'
+ end
+
+ context 'with auto_cancel: nil' do
+ let(:auto_cancel) { nil }
+
+ it_behaves_like 'not saving pipeline metadata'
+ end
+
+ context 'with auto_cancel_on_new_commit and no auto_cancel_on_job_failure' do
+ let(:auto_cancel) { { on_new_commit: on_new_commit } }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'with auto_cancel_on_job_failure and no auto_cancel_on_new_commit' do
+ let(:auto_cancel) { { on_job_failure: on_job_failure } }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'with auto_cancel_on_new_commit: nil and auto_cancel_on_job_failure: nil' do
+ let(:on_new_commit) { nil }
+ let(:on_job_failure) { nil }
+
+ it_behaves_like 'not saving pipeline metadata'
+ end
+
+ context 'with auto_cancel_on_new_commit valid and auto_cancel_on_job_failure: nil' do
+ let(:on_job_failure) { nil }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'with auto_cancel_on_new_commit: nil and auto_cancel_on_job_failure valid' do
+ let(:on_new_commit) { nil }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'when auto_cancel_on_job_failure: none' do
+ let(:on_job_failure) { 'none' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+
+ context 'when auto_cancel_pipeline_on_job_failure feature is disabled' do
+ before do
+ stub_feature_flags(auto_cancel_pipeline_on_job_failure: false)
+ end
+
+ it 'ignores the auto_cancel_on_job_failure value' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
+ end
+
+ context 'with both pipeline name and auto_cancel' do
+ let(:config) do
+ {
+ workflow: {
+ name: 'Pipeline name',
+ auto_cancel: {
+ on_new_commit: 'interruptible',
+ on_job_failure: 'none'
+ }
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ it_behaves_like 'not breaking the chain'
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.name).to eq('Pipeline name')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 91bb94bbb11..476b1be35a9 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -34,12 +34,15 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
{ rspec: { script: 'rspec' } }
end
+ let(:ff_always_set_pipeline_failure_reason) { true }
+
def run_chain
dependencies.map(&:perform!)
step.perform!
end
before do
+ stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@@ -100,7 +103,27 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
it 'increments the error metric' do
counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
- expect { run_chain }.to change { counter.get(reason: 'unknown_failure') }.by(1)
+ expect { run_chain }.to change { counter.get(reason: 'filtered_by_rules') }.by(1)
+ end
+
+ it 'sets the failure reason without persisting the pipeline', :aggregate_failures do
+ run_chain
+
+ expect(pipeline).not_to be_persisted
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_filtered_by_rules
+ end
+
+ context 'when ff always_set_pipeline_failure_reason is disabled' do
+ let(:ff_always_set_pipeline_failure_reason) { false }
+
+ it 'does not set the failure reason and does not persist the pipeline', :aggregate_failures do
+ run_chain
+
+ expect(pipeline).not_to be_persisted
+ expect(pipeline).not_to be_failed
+ expect(pipeline).not_to be_filtered_by_rules
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index 52a00e0d501..4017076d29f 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External, feature_category: :continuous_integration do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, :with_sign_ins) }
@@ -328,11 +328,12 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
context 'when save_incompleted is false' do
let(:save_incompleted) { false }
- it 'adds errors to the pipeline without dropping it' do
+ it 'adds errors to the pipeline without persisting it', :aggregate_failures do
perform!
- expect(pipeline.status).to eq('pending')
expect(pipeline).not_to be_persisted
+ expect(pipeline.status).to eq('failed')
+ expect(pipeline).to be_external_validation_failure
expect(pipeline.errors.to_a).to include('External validation failed')
end
diff --git a/spec/lib/gitlab/ci/reports/sbom/source_spec.rb b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
index c1eaea511b7..09a601833ad 100644
--- a/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
+++ b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
@@ -5,47 +5,93 @@ require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Reports::Sbom::Source, feature_category: :dependency_management do
let(:attributes) do
{
- type: :dependency_scanning,
- data: {
- 'category' => 'development',
- 'input_file' => { 'path' => 'package-lock.json' },
- 'source_file' => { 'path' => 'package.json' },
- 'package_manager' => { 'name' => 'npm' },
- 'language' => { 'name' => 'JavaScript' }
- }
+ type: type,
+ data: { 'category' => 'development',
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' } }.merge(extra_attributes)
}
end
- subject { described_class.new(**attributes) }
+ subject(:source) { described_class.new(**attributes) }
- it 'has correct attributes' do
- expect(subject).to have_attributes(
- source_type: attributes[:type],
- data: attributes[:data]
- )
- end
+ shared_examples_for 'it has correct common attributes' do
+ it 'has correct type and data' do
+ expect(subject).to have_attributes(
+ source_type: type,
+ data: attributes[:data]
+ )
+ end
- describe '#source_file_path' do
- it 'returns the correct source_file_path' do
- expect(subject.source_file_path).to eq('package.json')
+ describe '#packager' do
+ it 'returns the correct package manager name' do
+ expect(subject.packager).to eq("npm")
+ end
end
- end
- describe '#input_file_path' do
- it 'returns the correct input_file_path' do
- expect(subject.input_file_path).to eq("package-lock.json")
+ describe '#language' do
+ it 'returns the correct language' do
+ expect(subject.language).to eq("JavaScript")
+ end
end
end
- describe '#packager' do
- it 'returns the correct package manager name' do
- expect(subject.packager).to eq("npm")
+ context 'when dependency scanning' do
+ let(:type) { :dependency_scanning }
+ let(:extra_attributes) do
+ {
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' }
+ }
+ end
+
+ it_behaves_like 'it has correct common attributes'
+
+ describe '#source_file_path' do
+ it 'returns the correct source_file_path' do
+ expect(subject.source_file_path).to eq('package.json')
+ end
+ end
+
+ describe '#input_file_path' do
+ it 'returns the correct input_file_path' do
+ expect(subject.input_file_path).to eq("package-lock.json")
+ end
end
end
- describe '#language' do
- it 'returns the correct langauge' do
- expect(subject.language).to eq("JavaScript")
+ context 'when container scanning' do
+ let(:type) { :container_scanning }
+ let(:extra_attributes) do
+ {
+ "image" => { "name" => "rhel", "tag" => "7.1" },
+ "operating_system" => { "name" => "Red Hat Enterprise Linux", "version" => "7" }
+ }
+ end
+
+ it_behaves_like 'it has correct common attributes'
+
+ describe "#image_name" do
+ subject { source.image_name }
+
+ it { is_expected.to eq("rhel") }
+ end
+
+ describe "#image_tag" do
+ subject { source.image_tag }
+
+ it { is_expected.to eq("7.1") }
+ end
+
+ describe "#operating_system_name" do
+ subject { source.operating_system_name }
+
+ it { is_expected.to eq("Red Hat Enterprise Linux") }
+ end
+
+ describe "#operating_system_version" do
+ subject { source.operating_system_version }
+
+ it { is_expected.to eq("7") }
end
end
end
diff --git a/spec/lib/gitlab/ci/runner_instructions_spec.rb b/spec/lib/gitlab/ci/runner_instructions_spec.rb
index 31c53d4a030..6da649393f3 100644
--- a/spec/lib/gitlab/ci/runner_instructions_spec.rb
+++ b/spec/lib/gitlab/ci/runner_instructions_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::RunnerInstructions, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Ci::RunnerInstructions, feature_category: :fleet_visibility do
using RSpec::Parameterized::TableSyntax
let(:params) { {} }
diff --git a/spec/lib/gitlab/ci/runner_releases_spec.rb b/spec/lib/gitlab/ci/runner_releases_spec.rb
index 9e211327dee..126a5b85471 100644
--- a/spec/lib/gitlab/ci/runner_releases_spec.rb
+++ b/spec/lib/gitlab/ci/runner_releases_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::RunnerReleases, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Ci::RunnerReleases, feature_category: :fleet_visibility do
subject { described_class.instance }
let(:runner_releases_url) { 'http://testurl.com/runner_public_releases' }
diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
index 526d6cba657..778c0aa69de 100644
--- a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
+++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::RunnerUpgradeCheck, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Ci::RunnerUpgradeCheck, feature_category: :fleet_visibility do
using RSpec::Parameterized::TableSyntax
subject(:instance) { described_class.new(gitlab_version, runner_releases) }
diff --git a/spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb b/spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb
new file mode 100644
index 00000000000..c16356bfda7
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/Diffblue_Cover_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Diffblue-Cover.gitlab-ci.yml', feature_category: :continuous_integration do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Diffblue-Cover') }
+
+ describe 'the created pipeline' do
+ let(:pipeline_branch) { 'patch-1' }
+ let_it_be(:project) { create(:project, :repository, create_branch: 'patch-1') }
+ let(:user) { project.first_owner }
+
+ let(:mr_service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project, source_branch: pipeline_branch) }
+ let(:mr_pipeline) { mr_service.execute(merge_request).payload }
+ let(:mr_build_names) { mr_pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ end
+
+ it 'creates diffblue-cover jobs' do
+ expect(mr_build_names).to include('diffblue-cover')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index 36c6e805bdf..98f0d32960b 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe 'CI YML Templates' do
context 'that support autodevops' do
exceptions = [
+ 'Diffblue-Cover.gitlab-ci.yml', # no auto-devops
'Security/DAST.gitlab-ci.yml', # DAST stage is defined inside AutoDevops yml
'Security/DAST-API.gitlab-ci.yml', # no auto-devops
'Security/API-Fuzzing.gitlab-ci.yml', # no auto-devops
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index 860a1fd30bd..f8d67a6f0b4 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -66,6 +66,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
let_it_be(:assignees) { create_list(:user, 2) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:labels) { create_list(:label, 2) }
+ let(:merge_request_description) { nil }
let(:merge_request) do
create(:merge_request, :simple,
@@ -73,6 +74,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
target_project: project,
assignees: assignees,
milestone: milestone,
+ description: merge_request_description,
labels: labels)
end
@@ -113,6 +115,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
merge_request.source_branch
).to_s,
'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description,
+ 'CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED' => 'false',
'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
@@ -121,6 +125,78 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
'CI_MERGE_REQUEST_SQUASH_ON_MERGE' => merge_request.squash_on_merge?.to_s
end
+ context 'when merge request description hits the limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH + 1) }
+
+ it 'truncates the exposed description' do
+ truncated_description = merge_request.description.truncate(
+ MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH
+ )
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => truncated_description,
+ 'CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED' => 'true'
+ )
+ end
+ end
+
+ context 'when merge request description fits the length limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH - 1) }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description,
+ 'CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED' => 'false'
+ )
+ end
+ end
+
+ context 'when truncate_ci_merge_request_description feature flag is disabled' do
+ before do
+ stub_feature_flags(truncate_ci_merge_request_description: false)
+ end
+
+ context 'when merge request description hits the limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH + 1) }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
+ )
+ expect(subject.to_hash)
+ .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
+ end
+ end
+
+ context 'when merge request description fits the length limit' do
+ let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH - 1) }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
+ )
+ expect(subject.to_hash)
+ .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
+ end
+ end
+
+ context 'when merge request description does not exist' do
+ let(:merge_request_description) { nil }
+
+ it 'does not truncate the exposed description' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
+ )
+ expect(subject.to_hash)
+ .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
+ end
+ end
+ end
+
it 'exposes diff variables' do
expect(subject.to_hash)
.to include(
@@ -214,6 +290,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => merge_request.source_branch_sha,
'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description,
'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list,
'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','),
diff --git a/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb b/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
index cd68b0cdf2b..f5845e492bc 100644
--- a/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
+++ b/spec/lib/gitlab/ci/variables/downstream/generator_spec.rb
@@ -39,6 +39,15 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
]
end
+ let(:pipeline_dotenv_variables) do
+ [
+ { key: 'PIPELINE_DOTENV_VAR1', value: 'variable 1' },
+ { key: 'PIPELINE_DOTENV_VAR2', value: 'variable 2' },
+ { key: 'PIPELINE_DOTENV_RAW_VAR3', value: '$REF1', raw: true },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR4', value: 'interpolate $REF1 $REF2' }
+ ]
+ end
+
let(:bridge) do
instance_double(
'Ci::Bridge',
@@ -48,7 +57,8 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
expand_file_refs?: false,
yaml_variables: yaml_variables,
pipeline_variables: pipeline_variables,
- pipeline_schedule_variables: pipeline_schedule_variables
+ pipeline_schedule_variables: pipeline_schedule_variables,
+ dependency_variables: pipeline_dotenv_variables
)
end
@@ -69,7 +79,12 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
{ key: 'PIPELINE_SCHEDULE_VAR1', value: 'variable 1' },
{ key: 'PIPELINE_SCHEDULE_VAR2', value: 'variable 2' },
{ key: 'PIPELINE_SCHEDULE_RAW_VAR3', value: '$REF1', raw: true },
- { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR4', value: 'interpolate ref 1 ref 2' }
+ { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR4', value: 'interpolate ref 1 ref 2' },
+ { key: 'PIPELINE_DOTENV_VAR1', value: 'variable 1' },
+ { key: 'PIPELINE_DOTENV_VAR2', value: 'variable 2' },
+ { key: 'PIPELINE_DOTENV_RAW_VAR3', value: '$REF1', raw: true },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR4', value: 'interpolate ref 1 ref 2' }
+
]
expect(generator.calculate).to contain_exactly(*expected)
@@ -79,6 +94,7 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
allow(bridge).to receive(:yaml_variables).and_return([])
allow(bridge).to receive(:pipeline_variables).and_return([])
allow(bridge).to receive(:pipeline_schedule_variables).and_return([])
+ allow(bridge).to receive(:dependency_variables).and_return([])
expect(generator.calculate).to be_empty
end
@@ -105,6 +121,10 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
[{ key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate $REF1 $REF2 $FILE_REF3 $FILE_REF4' }]
end
+ let(:pipeline_dotenv_variables) do
+ [{ key: 'PIPELINE_DOTENV_INTERPOLATION_VAR', value: 'interpolate $REF1 $REF2 $FILE_REF3 $FILE_REF4' }]
+ end
+
context 'when expand_file_refs is true' do
before do
allow(bridge).to receive(:expand_file_refs?).and_return(true)
@@ -114,7 +134,8 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
expected = [
{ key: 'INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
{ key: 'PIPELINE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
- { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' }
+ { key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR', value: 'interpolate ref 1 ref 3 ' }
]
expect(generator.calculate).to contain_exactly(*expected)
@@ -131,6 +152,7 @@ RSpec.describe Gitlab::Ci::Variables::Downstream::Generator, feature_category: :
{ key: 'INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
{ key: 'PIPELINE_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
{ key: 'PIPELINE_SCHEDULE_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
+ { key: 'PIPELINE_DOTENV_INTERPOLATION_VAR', value: 'interpolate ref 1 $FILE_REF3 ' },
{ key: 'FILE_REF3', value: 'ref 3', variable_type: :file }
]
diff --git a/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
new file mode 100644
index 00000000000..03ff7077969
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Gitlab
+ module Ci
+ RSpec.describe YamlProcessor, feature_category: :pipeline_composition do
+ subject(:processor) { described_class.new(config, user: nil).execute }
+
+ let(:builds) { processor.builds }
+
+ context 'with interruptible' do
+ let(:default_config) { nil }
+
+ let(:config) do
+ <<~YAML
+ #{default_config}
+
+ build1:
+ script: rspec
+ interruptible: true
+
+ build2:
+ script: rspec
+ interruptible: false
+
+ build3:
+ script: rspec
+
+ bridge1:
+ trigger: some/project
+ interruptible: true
+
+ bridge2:
+ trigger: some/project
+ interruptible: false
+
+ bridge3:
+ trigger: some/project
+ YAML
+ end
+
+ it 'returns jobs with their interruptible value' do
+ expect(builds).to contain_exactly(
+ a_hash_including(name: 'build1', interruptible: true),
+ a_hash_including(name: 'build2', interruptible: false),
+ a_hash_including(name: 'build3').and(exclude(:interruptible)),
+ a_hash_including(name: 'bridge1', interruptible: true),
+ a_hash_including(name: 'bridge2', interruptible: false),
+ a_hash_including(name: 'bridge3').and(exclude(:interruptible))
+ )
+ end
+
+ context 'when default:interruptible is true' do
+ let(:default_config) do
+ <<~YAML
+ default:
+ interruptible: true
+ YAML
+ end
+
+ it 'returns jobs with their interruptible value' do
+ expect(builds).to contain_exactly(
+ a_hash_including(name: 'build1', interruptible: true),
+ a_hash_including(name: 'build2', interruptible: false),
+ a_hash_including(name: 'build3', interruptible: true),
+ a_hash_including(name: 'bridge1', interruptible: true),
+ a_hash_including(name: 'bridge2', interruptible: false),
+ a_hash_including(name: 'bridge3', interruptible: true)
+ )
+ end
+ end
+
+ context 'when default:interruptible is false' do
+ let(:default_config) do
+ <<~YAML
+ default:
+ interruptible: false
+ YAML
+ end
+
+ it 'returns jobs with their interruptible value' do
+ expect(builds).to contain_exactly(
+ a_hash_including(name: 'build1', interruptible: true),
+ a_hash_including(name: 'build2', interruptible: false),
+ a_hash_including(name: 'build3', interruptible: false),
+ a_hash_including(name: 'bridge1', interruptible: true),
+ a_hash_including(name: 'bridge2', interruptible: false),
+ a_hash_including(name: 'bridge3', interruptible: false)
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index f01c1c7d053..844a6849c8f 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -123,55 +123,6 @@ module Gitlab
end
end
- describe 'interruptible entry' do
- describe 'interruptible job' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec', interruptible: true })
- end
-
- it { expect(rspec_build[:interruptible]).to be_truthy }
- end
-
- describe 'interruptible job with default value' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec' })
- end
-
- it { expect(rspec_build).not_to have_key(:interruptible) }
- end
-
- describe 'uninterruptible job' do
- let(:config) do
- YAML.dump(rspec: { script: 'rspec', interruptible: false })
- end
-
- it { expect(rspec_build[:interruptible]).to be_falsy }
- end
-
- it "returns interruptible when overridden for job" do
- config = YAML.dump({ default: { interruptible: true },
- rspec: { script: "rspec" } })
-
- config_processor = described_class.new(config).execute
- builds = config_processor.builds.select { |b| b[:stage] == "test" }
-
- expect(builds.size).to eq(1)
- expect(builds.first).to eq({
- stage: "test",
- stage_idx: 2,
- name: "rspec",
- only: { refs: %w[branches tags] },
- options: { script: ["rspec"] },
- interruptible: true,
- allow_failure: false,
- when: "on_success",
- job_variables: [],
- root_variables_inheritance: true,
- scheduling_type: :stage
- })
- end
- end
-
describe 'retry entry' do
context 'when retry count is specified' do
let(:config) do
@@ -544,6 +495,27 @@ module Gitlab
expect(subject.workflow_name).to be_nil
end
end
+
+ context 'with auto_cancel' do
+ let(:config) do
+ <<-YML
+ workflow:
+ auto_cancel:
+ on_new_commit: interruptible
+ on_job_failure: all
+
+ hello:
+ script: echo world
+ YML
+ end
+
+ it 'parses the workflow:auto_cancel as workflow_auto_cancel' do
+ expect(subject.workflow_auto_cancel).to eq({
+ on_new_commit: 'interruptible',
+ on_job_failure: 'all'
+ })
+ end
+ end
end
describe '#warnings' do
@@ -1313,6 +1285,46 @@ module Gitlab
})
end
end
+
+ context 'when image and service have docker options' do
+ let(:config) do
+ <<~YAML
+ test:
+ script: exit 0
+ image:
+ name: ruby:2.7
+ docker:
+ platform: linux/amd64
+ services:
+ - name: postgres:11.9
+ docker:
+ platform: linux/amd64
+ YAML
+ end
+
+ it { is_expected.to be_valid }
+
+ it "returns with image" do
+ expect(processor.builds).to contain_exactly({
+ stage: "test",
+ stage_idx: 2,
+ name: "test",
+ only: { refs: %w[branches tags] },
+ options: {
+ script: ["exit 0"],
+ image: { name: "ruby:2.7",
+ executor_opts: { docker: { platform: 'linux/amd64' } } },
+ services: [{ name: "postgres:11.9",
+ executor_opts: { docker: { platform: 'linux/amd64' } } }]
+ },
+ allow_failure: false,
+ when: "on_success",
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ })
+ end
+ end
end
describe 'Variables' do
diff --git a/spec/lib/gitlab/circuit_breaker/notifier_spec.rb b/spec/lib/gitlab/circuit_breaker/notifier_spec.rb
new file mode 100644
index 00000000000..1640ebb99f9
--- /dev/null
+++ b/spec/lib/gitlab/circuit_breaker/notifier_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::CircuitBreaker::Notifier, feature_category: :shared do
+ subject(:instance) { described_class.new }
+
+ describe '#notify' do
+ context 'when event is failure' do
+ it 'sends an exception to Gitlab::ErrorTracking' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ instance.notify('test_service', 'failure')
+ end
+ end
+
+ context 'when event is not failure' do
+ it 'does not send an exception to Gitlab::ErrorTracking' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ instance.notify('test_service', 'test_event')
+ end
+ end
+ end
+
+ describe '#notify_warning' do
+ it do
+ expect { instance.notify_warning('test_service', 'test_message') }.not_to raise_error
+ end
+ end
+
+ describe '#notify_run' do
+ it do
+ expect { instance.notify_run('test_service') { puts 'test block' } }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/lib/gitlab/circuit_breaker/store_spec.rb b/spec/lib/gitlab/circuit_breaker/store_spec.rb
new file mode 100644
index 00000000000..1b1983d4b52
--- /dev/null
+++ b/spec/lib/gitlab/circuit_breaker/store_spec.rb
@@ -0,0 +1,201 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::CircuitBreaker::Store, :clean_gitlab_redis_rate_limiting, feature_category: :ai_abstraction_layer do
+ let(:key) { 'key-1' }
+ let(:value) { 'value' }
+ let(:circuit_store) { described_class.new }
+
+ shared_examples 'reliable circuit breaker store method' do
+ it 'does not raise an error when Redis::BaseConnectionError is encountered' do
+ allow(Gitlab::Redis::RateLimiting)
+ .to receive(:with)
+ .and_raise(Redis::BaseConnectionError)
+
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ describe '#key?' do
+ subject(:key?) { circuit_store.key?(key) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when key exists' do
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when key does not exist' do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#store' do
+ let(:options) { {} }
+
+ subject(:store) { circuit_store.store(key, value, options) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ it 'stores value for specified key without expiry by default' do
+ expect(store).to eq(value)
+
+ with_redis do |redis|
+ expect(redis.get(key)).to eq(value)
+ expect(redis.ttl(key)).to eq(-1)
+ end
+ end
+
+ context 'when expires option is set' do
+ let(:options) { { expires: 10 } }
+
+ it 'stores value for specified key with expiry' do
+ expect(store).to eq(value)
+
+ with_redis do |redis|
+ expect(redis.get(key)).to eq(value)
+ expect(redis.ttl(key)).to eq(10)
+ end
+ end
+ end
+ end
+
+ describe '#increment' do
+ let(:options) { {} }
+
+ subject(:increment) { circuit_store.increment(key, 1, options) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when key does not exist' do
+ it 'sets key and increments value' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(1)
+ expect(redis.ttl(key)).to eq(-1)
+ end
+ end
+
+ context 'with expiry' do
+ let(:options) { { expires: 10 } }
+
+ it 'sets key and increments value with expiration' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(1)
+ expect(redis.ttl(key)).to eq(10)
+ end
+ end
+ end
+ end
+
+ context 'when key exists' do
+ before do
+ circuit_store.store(key, 1)
+ end
+
+ it 'increments value' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(2)
+ expect(redis.ttl(key)).to eq(-1)
+ end
+ end
+
+ context 'with expiry' do
+ let(:options) { { expires: 10 } }
+
+ it 'increments value with expiration' do
+ increment
+
+ with_redis do |redis|
+ expect(redis.get(key).to_i).to eq(2)
+ expect(redis.ttl(key)).to eq(10)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#load' do
+ subject(:load) { circuit_store.load(key) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when key exists' do
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it 'returns the value of the key' do
+ expect(load).to eq(value)
+ end
+ end
+
+ context 'when key does not exist' do
+ it 'returns nil' do
+ expect(load).to be_nil
+ end
+ end
+ end
+
+ describe '#values_at' do
+ let(:other_key) { 'key-2' }
+ let(:other_value) { 'value-2' }
+
+ subject(:values_at) { circuit_store.values_at(key, other_key) }
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ context 'when keys exist' do
+ before do
+ circuit_store.store(key, value)
+ circuit_store.store(other_key, other_value)
+ end
+
+ it 'returns values of keys' do
+ expect(values_at).to match_array([value, other_value])
+ end
+ end
+
+ context 'when some keys do not exist' do
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it 'returns values of keys with nil for non-existing ones' do
+ expect(values_at).to match_array([value, nil])
+ end
+ end
+ end
+
+ describe '#delete' do
+ subject(:delete) { circuit_store.delete(key) }
+
+ before do
+ circuit_store.store(key, value)
+ end
+
+ it_behaves_like 'reliable circuit breaker store method'
+
+ it 'deletes key' do
+ delete
+
+ with_redis do |redis|
+ expect(redis.exists?(key)).to eq(false)
+ end
+ end
+ end
+
+ def with_redis(&block)
+ Gitlab::Redis::RateLimiting.with(&block)
+ end
+end
diff --git a/spec/lib/gitlab/circuit_breaker_spec.rb b/spec/lib/gitlab/circuit_breaker_spec.rb
new file mode 100644
index 00000000000..4cd2f41869e
--- /dev/null
+++ b/spec/lib/gitlab/circuit_breaker_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::CircuitBreaker, :clean_gitlab_redis_rate_limiting, feature_category: :shared do
+ let(:service_name) { 'DummyService' }
+ let(:volume_threshold) { 5 }
+ let(:circuit) do
+ Circuitbox.circuit(service_name,
+ { volume_threshold: volume_threshold, exceptions: [Gitlab::CircuitBreaker::InternalServerError] })
+ end
+
+ let(:dummy_class) do
+ Class.new do
+ def dummy_method
+ Gitlab::CircuitBreaker.run_with_circuit('DummyService') do
+ raise Gitlab::CircuitBreaker::InternalServerError
+ end
+ end
+
+ def another_dummy_method
+ Gitlab::CircuitBreaker.run_with_circuit('DummyService') do
+        # Intentionally empty: the block succeeds without raising.
+ end
+ end
+ end
+ end
+
+ subject(:instance) { dummy_class.new }
+
+ before do
+ stub_const(service_name, dummy_class)
+ allow(Circuitbox).to receive(:circuit).and_return(circuit)
+ end
+
+ # rubocop: disable RSpec/AnyInstanceOf -- the instance is defined by an initializer
+ describe '#circuit' do
+ it 'returns nil value' do
+ expect(instance.dummy_method).to be_nil
+ end
+
+ it 'does not raise an error' do
+ expect { instance.dummy_method }.not_to raise_error
+ end
+
+ context 'when failed multiple times below volume threshold' do
+ it 'does not open the circuit' do
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'failure')
+ .exactly(4).times
+
+ 4.times do
+ instance.dummy_method
+ end
+
+ expect(circuit).not_to be_open
+ end
+ end
+
+ context 'when failed multiple times over volume threshold' do
+ it 'allows the call 5 times, then opens the circuit and skips subsequent calls' do
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'failure')
+ .exactly(5).times
+
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'open')
+ .once
+
+ expect_any_instance_of(Gitlab::CircuitBreaker::Notifier).to receive(:notify)
+ .with(anything, 'skipped')
+ .once
+
+ 6.times do
+ instance.dummy_method
+ end
+
+ expect(circuit).to be_open
+ end
+ end
+
+ context 'when circuit is previously open' do
+ before do
+ # Opens the circuit
+ 6.times do
+ instance.dummy_method
+ end
+
+ # Deletes the open key
+ circuit.try_close_next_time
+ end
+
+ context 'when does not fail again' do
+ it 'closes the circuit' do
+ instance.another_dummy_method
+
+ expect(circuit).not_to be_open
+ end
+ end
+
+ context 'when fails again' do
+ it 'opens the circuit' do
+ instance.dummy_method
+
+ expect(circuit).to be_open
+ end
+ end
+ end
+ end
+ # rubocop: enable RSpec/AnyInstanceOf
+
+ describe '#run_with_circuit' do
+ let(:block) { proc {} }
+
+ it 'runs the code block within the Circuitbox circuit' do
+ expect(circuit).to receive(:run).with(exception: false, &block)
+ described_class.run_with_circuit('service', &block)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb
index 326e27fa716..d1dbd167d48 100644
--- a/spec/lib/gitlab/contributions_calendar_spec.rb
+++ b/spec/lib/gitlab/contributions_calendar_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
end
- let_it_be(:feature_project) do
+ let_it_be(:public_project_with_private_issues) do
create(:project, :public, :issues_private) do |project|
- create(:project_member, user: contributor, project: project).project
+ create(:project_member, user: contributor, project: project)
end
end
@@ -45,7 +45,12 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
def create_event(project, day, hour = 0, action = :created, target_symbol = :issue)
- targets[project] ||= create(target_symbol, project: project, author: contributor)
+ targets[project] ||=
+ if target_symbol == :merge_request
+ create(:merge_request, source_project: project, author: contributor)
+ else
+ create(target_symbol, project: project, author: contributor)
+ end
Event.create!(
project: project,
@@ -58,7 +63,7 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
describe '#activity_dates', :aggregate_failures do
- it "returns a hash of date => count" do
+ it 'returns a hash of date => count' do
create_event(public_project, last_week)
create_event(public_project, last_week)
create_event(public_project, today)
@@ -114,6 +119,15 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
expect(calendar(contributor).activity_dates[today]).to eq(2)
end
+ it "counts merge request events" do
+ create_event(public_project, today, 0, :created, :merge_request)
+ create_event(public_project, today, 1, :closed, :merge_request)
+ create_event(public_project, today, 2, :approved, :merge_request)
+ create_event(public_project, today, 3, :merged, :merge_request)
+
+ expect(calendar(contributor).activity_dates[today]).to eq(4)
+ end
+
context "when events fall under different dates depending on the system time zone" do
before do
create_event(public_project, today, 1)
@@ -189,10 +203,10 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
it "only shows private events to authorized users" do
e1 = create_event(public_project, today)
e2 = create_event(private_project, today)
- e3 = create_event(feature_project, today)
+ e3 = create_event(public_project_with_private_issues, today, 0, :created, :issue)
create_event(public_project, last_week)
- expect(calendar.events_by_date(today)).to contain_exactly(e1, e3)
+ expect(calendar.events_by_date(today)).to contain_exactly(e1)
expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3)
end
@@ -202,6 +216,17 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
expect(calendar.events_by_date(today)).to contain_exactly(e1)
end
+ it 'includes merge request events' do
+ mr_created_event = create_event(public_project, today, 0, :created, :merge_request)
+ mr_closed_event = create_event(public_project, today, 1, :closed, :merge_request)
+ mr_approved_event = create_event(public_project, today, 2, :approved, :merge_request)
+ mr_merged_event = create_event(public_project, today, 3, :merged, :merge_request)
+
+ expect(calendar.events_by_date(today)).to contain_exactly(
+ mr_created_event, mr_closed_event, mr_approved_event, mr_merged_event
+ )
+ end
+
context 'when the user cannot read cross project' do
before do
allow(Ability).to receive(:allowed?).and_call_original
@@ -215,40 +240,4 @@ RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do
end
end
end
-
- describe '#starting_year' do
- let(:travel_time) { Time.find_zone('UTC').local(2020, 12, 31, 19, 0, 0) }
-
- context "when the contributor's timezone is not set" do
- it "is the start of last year in the system timezone" do
- expect(calendar.starting_year).to eq(2019)
- end
- end
-
- context "when the contributor's timezone is set to Sydney" do
- let(:contributor) { create(:user, { timezone: 'Sydney' }) }
-
- it "is the start of last year in Sydney" do
- expect(calendar.starting_year).to eq(2020)
- end
- end
- end
-
- describe '#starting_month' do
- let(:travel_time) { Time.find_zone('UTC').local(2020, 12, 31, 19, 0, 0) }
-
- context "when the contributor's timezone is not set" do
- it "is the start of this month in the system timezone" do
- expect(calendar.starting_month).to eq(12)
- end
- end
-
- context "when the contributor's timezone is set to Sydney" do
- let(:contributor) { create(:user, { timezone: 'Sydney' }) }
-
- it "is the start of this month in Sydney" do
- expect(calendar.starting_month).to eq(1)
- end
- end
- end
end
diff --git a/spec/lib/gitlab/counters/buffered_counter_spec.rb b/spec/lib/gitlab/counters/buffered_counter_spec.rb
index 4fd152eb805..e9b3eb3ae62 100644
--- a/spec/lib/gitlab/counters/buffered_counter_spec.rb
+++ b/spec/lib/gitlab/counters/buffered_counter_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
describe '#get' do
it 'returns the value when there is an existing value stored in the counter' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.set(counter.key, 456)
end
@@ -393,7 +393,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
it 'removes the key from Redis' do
counter.initiate_refresh!
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
expect(redis.exists?(counter.key)).to eq(false)
end
end
@@ -488,7 +488,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
it 'removes all tracking keys' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
expect { counter.cleanup_refresh }
.to change { redis.scan_each(match: "#{counter.refresh_key}*").to_a.count }.from(4).to(0)
end
@@ -533,7 +533,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
let(:flushed_amount) { 10 }
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.incrby(counter.flushed_key, flushed_amount)
end
end
@@ -546,7 +546,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
it 'deletes the relative :flushed key' do
counter.commit_increment!
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
key_exists = redis.exists?(counter.flushed_key)
expect(key_exists).to be_falsey
end
@@ -555,7 +555,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
context 'when deleting :flushed key fails' do
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.incrby(counter.flushed_key, 10)
allow(redis).to receive(:del).and_raise('could not delete key')
@@ -614,7 +614,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
with_them do
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.set(increment_key, increment) if increment
redis.set(flushed_key, flushed) if flushed
end
@@ -635,19 +635,19 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
def redis_get_key(key)
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.get(key)
end
end
def redis_exists_key(key)
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.exists?(key)
end
end
def redis_key_ttl(key)
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::BufferedCounter.with do |redis|
redis.ttl(key)
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb b/spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb
new file mode 100644
index 00000000000..b3aa0c194d2
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batched_background_migration_dictionary_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Database::BackgroundMigration::BatchedBackgroundMigrationDictionary, feature_category: :database do
+ describe '.entry' do
+ it 'returns a single dictionary entry for the given migration job' do
+ entry = described_class.entry('MigrateHumanUserType')
+ expect(entry.migration_job_name).to eq('MigrateHumanUserType')
+ expect(entry.finalized_by).to eq(20230523101514)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index f70b38377d8..ffede2b6759 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -911,4 +911,18 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(actual).to contain_exactly(migration)
end
end
+
+ describe '#finalize_command' do
+ let_it_be(:migration) do
+ create(
+ :batched_background_migration,
+ gitlab_schema: :gitlab_main,
+ job_arguments: [['column_1'], ['column_1_convert_to_bigint']]
+ )
+ end
+
+ it 'generates the correct finalize command' do
+ expect(migration.finalize_command).to eq("sudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"column_1\"]\\,[\"column_1_convert_to_bigint\"]]']")
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/decomposition/migrate_spec.rb b/spec/lib/gitlab/database/decomposition/migrate_spec.rb
new file mode 100644
index 00000000000..fa2248e8d84
--- /dev/null
+++ b/spec/lib/gitlab/database/decomposition/migrate_spec.rb
@@ -0,0 +1,180 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Decomposition::Migrate, :delete, query_analyzers: false, feature_category: :cell do
+ let(:ci_database_name) do
+ config = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash
+
+ "#{config[:database]}_ci"
+ end
+
+ let(:ci_connection) do
+ database_model = self.class.const_set(:TestCiApplicationRecord, Class.new(ApplicationRecord))
+
+ database_model.establish_connection(
+ ActiveRecord::DatabaseConfigurations::HashConfig.new(
+ ActiveRecord::Base.connection_db_config.env_name,
+ 'ci',
+ ActiveRecord::Base.connection_db_config.configuration_hash.dup.merge(database: ci_database_name)
+ )
+ )
+
+ Gitlab::Database::LoadBalancing::Setup.new(database_model).setup
+
+ database_model.connection
+ end
+
+ let(:backup_location_postfix) { SecureRandom.alphanumeric(10) }
+
+ before do
+ skip_if_database_exists(:ci)
+
+ allow(SecureRandom).to receive(:alphanumeric).with(10).and_return(backup_location_postfix)
+ end
+
+ after do
+ Milestone.delete_all
+ Ci::Pipeline.delete_all
+ end
+
+ describe '#new' do
+ context 'when backup_location is not specified' do
+ subject(:instance) { described_class.new }
+
+ it 'defaults to subdirectory of configured backup location' do
+ expect(instance.instance_variable_get(:@backup_location)).to eq(
+ File.join(Gitlab.config.backup.path, "migration_#{backup_location_postfix}")
+ )
+ end
+ end
+
+ context 'when backup_location is specified' do
+ let(:backup_base_location) { Rails.root.join('tmp') }
+
+ subject(:instance) { described_class.new(backup_base_location: backup_base_location) }
+
+ it 'uses subdirectory of specified backup_location' do
+ expect(instance.instance_variable_get(:@backup_location)).to eq(
+ File.join(backup_base_location, "migration_#{backup_location_postfix}")
+ )
+ end
+
+ context 'when specified_backup_location does not exist' do
+ let(:backup_base_location) { Rails.root.join('tmp', SecureRandom.alphanumeric(10)) }
+
+ context 'and creation of the directory succeeds' do
+ it 'uses subdirectory of specified backup_location' do
+ expect(instance.instance_variable_get(:@backup_location)).to eq(
+ File.join(backup_base_location, "migration_#{backup_location_postfix}")
+ )
+ end
+ end
+
+ context 'and creation of the directory fails' do
+ before do
+ allow(FileUtils).to receive(:mkdir_p).with(backup_base_location).and_raise(Errno::EROFS.new)
+ end
+
+ it 'raises error' do
+ expect { instance.process! }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Failed to create directory #{backup_base_location}: Read-only file system"
+ )
+ end
+ end
+ end
+ end
+ end
+
+ describe '#process!' do
+ subject(:process) { described_class.new.process! }
+
+ before do
+ # Database `ci` is not configured. But it can still exist. So drop and create it
+ ActiveRecord::Base.connection.execute("DROP DATABASE IF EXISTS #{ci_database_name} WITH (FORCE)")
+ ActiveRecord::Base.connection.execute("CREATE DATABASE #{ci_database_name}")
+ end
+
+ context 'when the checks pass' do
+ let!(:milestone) { create(:milestone) }
+ let!(:ci_pipeline) { create(:ci_pipeline) }
+
+ it 'copies main database to ci database' do
+ process
+
+ ci_milestones = ci_connection.execute("SELECT COUNT(*) FROM milestones").getvalue(0, 0)
+ ci_pipelines = ci_connection.execute("SELECT COUNT(*) FROM ci_pipelines").getvalue(0, 0)
+
+ expect(ci_milestones).to be(Milestone.count)
+ expect(ci_pipelines).to be(Ci::Pipeline.count)
+ end
+ end
+
+ context 'when local diskspace is not enough' do
+ let(:backup_location) { described_class.new.backup_location }
+ let(:fake_stats) { instance_double(Sys::Filesystem::Stat, bytes_free: 1000) }
+
+ before do
+ allow(Sys::Filesystem).to receive(:stat).with(File.expand_path("#{backup_location}/../")).and_return(fake_stats)
+ end
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ /Not enough diskspace available on #{backup_location}: Available: (.+?), Needed: (.+?)/
+ )
+ end
+ end
+
+ context 'when connection to ci database fails' do
+ before do
+ ActiveRecord::Base.connection.execute("DROP DATABASE IF EXISTS #{ci_database_name} WITH (FORCE)")
+ end
+
+ it 'raises error' do
+ host = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash[:host]
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Can't connect to database '#{ci_database_name} on host '#{host}'. Ensure the database has been created.")
+ end
+ end
+
+ context 'when ci database is not empty' do
+ before do
+ ci_connection.execute("CREATE TABLE IF NOT EXISTS _test_table (id integer, primary key (id))")
+ end
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Database '#{ci_database_name}' is not empty"
+ )
+ end
+ end
+
+ context 'when already on decomposed setup' do
+ before do
+ allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_MULTIPLE_DATABASES)
+ end
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "GitLab is already configured to run on multiple databases"
+ )
+ end
+ end
+
+ context 'when not all background migrations are finished' do
+ let!(:batched_migration) { create(:batched_background_migration, :active) }
+
+ it 'raises error' do
+ expect { process }.to raise_error(
+ Gitlab::Database::Decomposition::MigrateError,
+ "Found 1 unfinished Background Migration(s). Please wait until they are finished."
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/dictionary_spec.rb b/spec/lib/gitlab/database/dictionary_spec.rb
index 6d2de41468b..261cf27ed69 100644
--- a/spec/lib/gitlab/database/dictionary_spec.rb
+++ b/spec/lib/gitlab/database/dictionary_spec.rb
@@ -3,81 +3,104 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Dictionary, feature_category: :database do
- subject(:database_dictionary) { described_class.new(file_path) }
+ describe '.entries' do
+ it 'all tables and views are unique' do
+ table_and_view_names = described_class.entries('')
+ table_and_view_names += described_class.entries('views')
+
+ # ignore gitlab_internal due to `ar_internal_metadata`, `schema_migrations`
+ table_and_view_names = table_and_view_names
+ .reject { |database_dictionary| database_dictionary.schema?('gitlab_internal') }
+
+ duplicated_tables = table_and_view_names
+ .group_by(&:key_name)
+ .select { |_, schemas| schemas.count > 1 }
+ .keys
+
+ expect(duplicated_tables).to be_empty, \
+ "Duplicated table(s) #{duplicated_tables.to_a} found in #{described_class}.views_and_tables_to_schema. " \
+ "Any duplicated table must be removed from db/docs/ or ee/db/docs/. " \
+ "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
+ end
+ end
- context 'for a table' do
- let(:file_path) { 'db/docs/application_settings.yml' }
+ describe '::Entry' do
+ subject(:database_dictionary) { described_class::Entry.new(file_path) }
- describe '#name_and_schema' do
- it 'returns the name of the table and its gitlab schema' do
- expect(database_dictionary.name_and_schema).to match_array(['application_settings', :gitlab_main_clusterwide])
+ context 'for a table' do
+ let(:file_path) { 'db/docs/application_settings.yml' }
+
+ describe '#name_and_schema' do
+ it 'returns the name of the table and its gitlab schema' do
+ expect(database_dictionary.name_and_schema).to match_array(['application_settings', :gitlab_main_clusterwide])
+ end
end
- end
- describe '#table_name' do
- it 'returns the name of the table' do
- expect(database_dictionary.table_name).to eq('application_settings')
+ describe '#table_name' do
+ it 'returns the name of the table' do
+ expect(database_dictionary.table_name).to eq('application_settings')
+ end
end
- end
- describe '#view_name' do
- it 'returns nil' do
- expect(database_dictionary.view_name).to be_nil
+ describe '#view_name' do
+ it 'returns nil' do
+ expect(database_dictionary.view_name).to be_nil
+ end
end
- end
- describe '#milestone' do
- it 'returns the milestone in which the table was introduced' do
- expect(database_dictionary.milestone).to eq('7.7')
+ describe '#milestone' do
+ it 'returns the milestone in which the table was introduced' do
+ expect(database_dictionary.milestone).to eq('7.7')
+ end
end
- end
- describe '#gitlab_schema' do
- it 'returns the gitlab_schema of the table' do
- expect(database_dictionary.table_name).to eq('application_settings')
+ describe '#gitlab_schema' do
+ it 'returns the gitlab_schema of the table' do
+ expect(database_dictionary.table_name).to eq('application_settings')
+ end
end
- end
- describe '#schema?' do
- it 'checks if the given schema matches the schema of the table' do
- expect(database_dictionary.schema?('gitlab_main')).to eq(false)
- expect(database_dictionary.schema?('gitlab_main_clusterwide')).to eq(true)
+ describe '#schema?' do
+ it 'checks if the given schema matches the schema of the table' do
+ expect(database_dictionary.schema?('gitlab_main')).to eq(false)
+ expect(database_dictionary.schema?('gitlab_main_clusterwide')).to eq(true)
+ end
end
- end
- describe '#key_name' do
- it 'returns the value of the name of the table' do
- expect(database_dictionary.key_name).to eq('application_settings')
+ describe '#key_name' do
+ it 'returns the value of the name of the table' do
+ expect(database_dictionary.key_name).to eq('application_settings')
+ end
end
- end
- describe '#validate!' do
- it 'raises an error if the gitlab_schema is empty' do
- allow(database_dictionary).to receive(:gitlab_schema).and_return(nil)
+ describe '#validate!' do
+ it 'raises an error if the gitlab_schema is empty' do
+ allow(database_dictionary).to receive(:gitlab_schema).and_return(nil)
- expect { database_dictionary.validate! }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
+ expect { database_dictionary.validate! }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
+ end
end
end
- end
- context 'for a view' do
- let(:file_path) { 'db/docs/views/postgres_constraints.yml' }
+ context 'for a view' do
+ let(:file_path) { 'db/docs/views/postgres_constraints.yml' }
- describe '#table_name' do
- it 'returns nil' do
- expect(database_dictionary.table_name).to be_nil
+ describe '#table_name' do
+ it 'returns nil' do
+ expect(database_dictionary.table_name).to be_nil
+ end
end
- end
- describe '#view_name' do
- it 'returns the name of the view' do
- expect(database_dictionary.view_name).to eq('postgres_constraints')
+ describe '#view_name' do
+ it 'returns the name of the view' do
+ expect(database_dictionary.view_name).to eq('postgres_constraints')
+ end
end
- end
- describe '#key_name' do
- it 'returns the value of the name of the view' do
- expect(database_dictionary.key_name).to eq('postgres_constraints')
+ describe '#key_name' do
+ it 'returns the value of the name of the view' do
+ expect(database_dictionary.key_name).to eq('postgres_constraints')
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index a47e53c18a5..7fca47c707c 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -1,13 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.shared_examples 'validate path globs' do |path_globs|
- it 'returns an array of path globs' do
- expect(path_globs).to be_an(Array)
- expect(path_globs).to all(be_an(Pathname))
- end
-end
-
RSpec.shared_examples 'validate schema data' do |tables_and_views|
it 'all tables and views have assigned a known gitlab_schema' do
expect(tables_and_views).to all(
@@ -88,32 +81,6 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
end
end
-
- it 'all tables and views are unique' do
- table_and_view_names = described_class.build_dictionary('')
- table_and_view_names += described_class.build_dictionary('views')
-
- # ignore gitlab_internal due to `ar_internal_metadata`, `schema_migrations`
- table_and_view_names = table_and_view_names
- .reject { |database_dictionary| database_dictionary.schema?('gitlab_internal') }
-
- duplicated_tables = table_and_view_names
- .group_by(&:key_name)
- .select { |_, schemas| schemas.count > 1 }
- .keys
-
- expect(duplicated_tables).to be_empty, \
- "Duplicated table(s) #{duplicated_tables.to_a} found in #{described_class}.views_and_tables_to_schema. " \
- "Any duplicated table must be removed from db/docs/ or ee/db/docs/. " \
- "More info: https://docs.gitlab.com/ee/development/database/database_dictionary.html"
- end
- end
-
- describe '.dictionary_path_globs' do
- include_examples 'validate path globs', described_class.dictionary_path_globs('')
- include_examples 'validate path globs', described_class.dictionary_path_globs('views')
- include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_views')
- include_examples 'validate path globs', described_class.dictionary_path_globs('deleted_tables')
end
describe '.tables_to_schema' do
@@ -306,4 +273,16 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
end
end
end
+
+ describe '.cell_local?' do
+ it 'is true for cell local tables and false otherwise' do
+ expect(described_class.cell_local?('gitlab_ci')).to eq(true)
+ expect(described_class.cell_local?('gitlab_pm')).to eq(true)
+ expect(described_class.cell_local?('gitlab_main_cell')).to eq(true)
+ expect(described_class.cell_local?('gitlab_main')).to eq(false)
+ expect(described_class.cell_local?('gitlab_main_clusterwide')).to eq(false)
+ expect(described_class.cell_local?('gitlab_shared')).to eq(false)
+ expect(described_class.cell_local?('gitlab_internal')).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
index cd145bd5c0f..328cdede794 100644
--- a/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
+++ b/spec/lib/gitlab/database/health_status/indicators/autovacuum_active_on_table_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::Database::HealthStatus::Indicators::AutovacuumActiveOnTab
before do
swapout_view_for_table(:postgres_autovacuum_activity, connection: connection)
+ stub_feature_flags(skip_autovacuum_health_check_for_ci_builds: false)
end
let(:tables) { [table] }
@@ -59,10 +60,34 @@ RSpec.describe Gitlab::Database::HealthStatus::Indicators::AutovacuumActiveOnTab
expect(subject.indicator_class).to eq(described_class)
end
- it 'returns NoSignal signal in case the feature flag is disabled' do
- stub_feature_flags(batched_migrations_health_status_autovacuum: false)
+ context 'with specific feature flags' do
+ it 'returns NotAvailable on batched_migrations_health_status_autovacuum FF being disable' do
+ stub_feature_flags(batched_migrations_health_status_autovacuum: false)
- expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
+ end
+
+ context 'with skip_autovacuum_health_check_for_ci_builds FF being enabled' do
+ before do
+ stub_feature_flags(skip_autovacuum_health_check_for_ci_builds: true)
+ end
+
+ context 'for ci_builds table' do
+ let(:table) { 'ci_builds' }
+
+ it 'returns NotAvailable' do
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::NotAvailable)
+ end
+ end
+
+ context 'for users table' do
+ let(:table) { 'users' }
+
+ it 'returns Stop signal' do
+ expect(subject).to be_a(Gitlab::Database::HealthStatus::Signals::Stop)
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index c975f5b5ee4..3c14dc23a80 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -92,8 +92,20 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store, fe
end
end
+ shared_examples 'restrict within concurrent ruby' do |lb_method|
+ it 'raises an exception when running within a concurrent Ruby thread' do
+ Thread.current[:restrict_within_concurrent_ruby] = true
+
+ expect { |b| lb.public_send(lb_method, &b) }.to raise_error(Gitlab::Utils::ConcurrentRubyThreadIsUsedError,
+ "Cannot run 'db' if running from `Concurrent::Promise`.")
+
+ Thread.current[:restrict_within_concurrent_ruby] = nil
+ end
+ end
+
describe '#read' do
it_behaves_like 'logs service discovery thread interruption', :read
+ it_behaves_like 'restrict within concurrent ruby', :read
it 'yields a connection for a read' do
connection = double(:connection)
@@ -227,6 +239,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store, fe
describe '#read_write' do
it_behaves_like 'logs service discovery thread interruption', :read_write
+ it_behaves_like 'restrict within concurrent ruby', :read_write
it 'yields a connection for a write' do
connection = ActiveRecord::Base.connection_pool.connection
diff --git a/spec/lib/gitlab/database/migration_spec.rb b/spec/lib/gitlab/database/migration_spec.rb
index 18bbc6c1dd3..8390a5ff19e 100644
--- a/spec/lib/gitlab/database/migration_spec.rb
+++ b/spec/lib/gitlab/database/migration_spec.rb
@@ -34,6 +34,12 @@ RSpec.describe Gitlab::Database::Migration do
# untouched.
expect(described_class[described_class.current_version]).to be < ActiveRecord::Migration::Current
end
+
+ it 'matches the version used by Rubocop' do
+ require 'rubocop'
+ load 'rubocop/cop/migration/versioned_migration_class.rb'
+ expect(described_class.current_version).to eq(RuboCop::Cop::Migration::VersionedMigrationClass::CURRENT_MIGRATION_VERSION)
+ end
end
describe Gitlab::Database::Migration::LockRetriesConcern do
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index a81ccf9583a..5c98379d852 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -71,8 +71,11 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
end
context "when the migration doesn't exist already" do
+ let(:version) { '20231204101122' }
+
before do
allow(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
+ allow(migration).to receive(:version).and_return(version)
end
subject(:enqueue_batched_background_migration) do
@@ -81,7 +84,6 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
:projects,
:id,
job_interval: 5.minutes,
- queued_migration_version: format("%.14d", 123),
batch_min_value: 5,
batch_max_value: 1000,
batch_class_name: 'MyBatchClass',
@@ -115,7 +117,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
status_name: :active,
total_tuple_count: pgclass_info.cardinality_estimate,
gitlab_schema: 'gitlab_ci',
- queued_migration_version: format("%.14d", 123)
+ queued_migration_version: version
)
end
end
diff --git a/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb
index 33e83ea2575..a9ef28a4b51 100644
--- a/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb
+++ b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb
@@ -18,6 +18,17 @@ RSpec.describe Gitlab::Database::Migrations::PgBackendPid, feature_category: :da
expect { |b| patched_instance.with_advisory_lock_connection(&b) }.to yield_with_args(:conn)
end
+
+ it 're-yields with same arguments and wraps it with calls to .say even when error is raised' do
+ patched_instance = klass.prepend(described_class).new
+ expect(Gitlab::Database::Migrations::PgBackendPid).to receive(:say).twice
+
+ expect do
+ patched_instance.with_advisory_lock_connection do
+ raise ActiveRecord::ConcurrentMigrationError, 'test'
+ end
+ end.to raise_error ActiveRecord::ConcurrentMigrationError
+ end
end
describe '.patch!' do
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
index c57b8bb5992..60934eb06a5 100644
--- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe 'cross-database foreign keys' do
# should be added as a comment along with the name of the column.
let!(:allowed_cross_database_foreign_keys) do
[
- 'events.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429803
'gitlab_subscriptions.hosted_plan_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422012
'group_import_states.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421210
'identities.saml_provider_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422010
@@ -27,10 +26,8 @@ RSpec.describe 'cross-database foreign keys' do
'namespace_commit_emails.email_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429804
'namespace_commit_emails.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429804
'path_locks.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429380
- 'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044
'protected_branch_push_access_levels.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/431054
'protected_branch_merge_access_levels.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/431055
- 'security_orchestration_policy_configurations.bot_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/429438
'user_group_callouts.user_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/421287
]
end
@@ -59,4 +56,17 @@ RSpec.describe 'cross-database foreign keys' do
end
end
end
+
+ it 'only allows existing foreign keys to be present in the exempted list', :aggregate_failures do
+ allowed_cross_database_foreign_keys.each do |entry|
+ table, _ = entry.split('.')
+
+ all_foreign_keys_for_table = foreign_keys_for(table)
+ fk_entry = all_foreign_keys_for_table.find { |fk| "#{fk.from_table}.#{fk.column}" == entry }
+
+ expect(fk_entry).to be_present,
+ "`#{entry}` is no longer a foreign key. " \
+ "You must remove this entry from the `allowed_cross_database_foreign_keys` list."
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
index 338475fa9c4..d1d7aa12c46 100644
--- a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
+++ b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
@@ -11,7 +11,9 @@ RSpec.describe 'new tables with gitlab_main schema', feature_category: :cell do
# Specific tables can be exempted from this requirement, and such tables must be added to the `exempted_tables` list.
let!(:exempted_tables) do
- []
+ [
+ "audit_events_instance_amazon_s3_configurations" # https://gitlab.com/gitlab-org/gitlab/-/issues/431327
+ ]
end
let!(:starting_from_milestone) { 16.7 }
@@ -48,16 +50,16 @@ RSpec.describe 'new tables with gitlab_main schema', feature_category: :cell do
end
def tables_having_gitlab_main_schema(starting_from_milestone:)
- selected_data = gitlab_main_schema_tables.select do |database_dictionary|
- database_dictionary.milestone.to_f >= starting_from_milestone
+ selected_data = gitlab_main_schema_tables.select do |entry|
+ entry.milestone.to_f >= starting_from_milestone
end
selected_data.map(&:table_name)
end
def gitlab_main_schema_tables
- ::Gitlab::Database::GitlabSchema.build_dictionary('').select do |database_dictionary|
- database_dictionary.schema?('gitlab_main')
+ ::Gitlab::Database::Dictionary.entries.select do |entry|
+ entry.schema?('gitlab_main')
end
end
end
diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
index fa7645d581c..56899924b60 100644
--- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
+++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
@@ -53,11 +53,11 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns, feature_category: :data
expect(subject.execute).to eq(
[
['Testing::A', {
- 'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
- 'also_unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-02-01'), '12.1')
+ 'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0', false),
+ 'also_unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-02-01'), '12.1', false)
}],
['Testing::B', {
- 'other' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0')
+ 'other' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0', false)
}]
])
end
diff --git a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
index 79c2c9e32d2..337749446ed 100644
--- a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
let(:connection) { ActiveRecord::Base.connection }
let(:table_name) { :_test_gitlab_ci_partitioned_test }
let(:model) { class_double(ApplicationRecord, table_name: table_name, connection: connection) }
- let(:next_partition_if) { nil }
- let(:detach_partition_if) { nil }
+ let(:next_partition_if) { ->(_) { false } }
+ let(:detach_partition_if) { ->(_) { false } }
subject(:strategy) do
described_class.new(model, :partition,
@@ -62,6 +62,16 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
it 'is the partition with the largest value' do
expect(strategy.active_partition.value).to eq(101)
end
+
+ context 'when there are no partitions' do
+ before do
+ drop_partitions
+ end
+
+ it 'is the initial partition' do
+ expect(strategy.active_partition.value).to eq(100)
+ end
+ end
end
describe '#missing_partitions' do
@@ -74,6 +84,17 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
expect(extra.length).to eq(1)
expect(extra.first.value).to eq(102)
end
+
+ context 'when there are no partitions for the table' do
+ it 'returns partitions for value 100 and 101' do
+ drop_partitions
+
+ missing_partitions = strategy.missing_partitions
+
+ expect(missing_partitions.size).to eq(2)
+ expect(missing_partitions.map(&:value)).to match_array([100, 101])
+ end
+ end
end
context 'when next_partition_if returns false' do
@@ -85,8 +106,8 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
end
context 'when there are no partitions for the table' do
- it 'returns a partition for value 1' do
- connection.execute("drop table #{table_name}_100; drop table #{table_name}_101;")
+ it 'returns a partition for value 100' do
+ drop_partitions
missing_partitions = strategy.missing_partitions
@@ -201,4 +222,8 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca
})
end
end
+
+ def drop_partitions
+ connection.execute("drop table #{table_name}_100; drop table #{table_name}_101;")
+ end
end
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
index 2e654a33a58..fd2455e25c0 100644
--- a/spec/lib/gitlab/database/postgres_index_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::PostgresIndex do
+RSpec.describe Gitlab::Database::PostgresIndex, feature_category: :database do
let(:schema) { 'public' }
let(:name) { 'foo_idx' }
let(:identifier) { "#{schema}.#{name}" }
@@ -13,6 +13,9 @@ RSpec.describe Gitlab::Database::PostgresIndex do
CREATE UNIQUE INDEX bar_key ON public.users (id);
CREATE TABLE _test_gitlab_main_example_table (id serial primary key);
+
+ CREATE TABLE _test_partitioned (id bigserial primary key not null) PARTITION BY LIST (id);
+ CREATE TABLE _test_partitioned_1 PARTITION OF _test_partitioned FOR VALUES IN (1);
SQL
end
@@ -25,8 +28,8 @@ RSpec.describe Gitlab::Database::PostgresIndex do
it { is_expected.to be_a Gitlab::Database::SharedModel }
describe '.reindexing_support' do
- it 'only non partitioned indexes' do
- expect(described_class.reindexing_support).to all(have_attributes(partitioned: false))
+ it 'includes partitioned indexes' do
+ expect(described_class.reindexing_support.where("name = '_test_partitioned_1_pkey'")).not_to be_empty
end
it 'only indexes that dont serve an exclusion constraint' do
diff --git a/spec/lib/gitlab/database/postgres_sequences_spec.rb b/spec/lib/gitlab/database/postgres_sequences_spec.rb
new file mode 100644
index 00000000000..2373edaea18
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_sequences_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresSequence, type: :model, feature_category: :database do
+ # PostgresSequence does not `behaves_like 'a postgres model'` because it does not correspond 1-1 with a single entry
+ # in pg_class
+ let(:schema) { ActiveRecord::Base.connection.current_schema }
+ let(:table_name) { '_test_table' }
+ let(:table_name_without_sequence) { '_test_table_without_sequence' }
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id bigserial PRIMARY KEY NOT NULL
+ );
+
+ CREATE TABLE #{table_name_without_sequence} (
+ id bigint PRIMARY KEY NOT NULL
+ );
+ SQL
+ end
+
+ describe '#by_table_name' do
+ context 'when table does not have a sequence' do
+ it 'returns an empty collection' do
+ expect(described_class.by_table_name(table_name_without_sequence)).to be_empty
+ end
+ end
+
+ it 'returns the sequence for a given table' do
+ expect(described_class.by_table_name(table_name).first[:table_name]).to eq(table_name)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index 3650ca1d904..9570a25238e 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -15,7 +15,9 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin, :d
end
let(:config) { ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash.merge(pool: 1) }
- let(:pool) { model.establish_connection(config) }
+ let(:pool) do
+ model.establish_connection(ActiveRecord::DatabaseConfigurations::HashConfig.new(Rails.env, 'main', config))
+ end
it 'calls the force disconnect callback on checkin' do
connection = pool.connection
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
index 28c155c1eb1..7fcdc59b691 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
@@ -42,12 +42,21 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventSetOperatorMismatch, que
end
context 'when SQL does not include a set operator' do
- let(:sql) { 'SELECT 1' }
+ where(:sql) do
+ [
+ 'SELECT 1',
+ 'SELECT union_station',
+ 'SELECT intersection',
+ 'SELECT deny_all_requests_except_allowed from application_settings'
+ ]
+ end
- it 'does not parse SQL' do
- expect(described_class::SelectStmt).not_to receive(:new)
+ with_them do
+ it 'does not parse SQL' do
+ expect(described_class::SelectStmt).not_to receive(:new)
- process_sql sql
+ process_sql sql
+ end
end
end
diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
index 6a614e2488f..ed83ed9e744 100644
--- a/spec/lib/gitlab/database/schema_migrations/context_spec.rb
+++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
@@ -25,12 +25,15 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
context 'multiple databases', :reestablished_active_record_base do
before do
- connection_class.establish_connection(
+ db_config =
ActiveRecord::Base
.connection_pool
.db_config
.configuration_hash
.merge(configuration_overrides)
+
+ connection_class.establish_connection(
+ ActiveRecord::DatabaseConfigurations::HashConfig.new(Rails.env, 'main', db_config)
)
end
diff --git a/spec/lib/gitlab/database/sharding_key_spec.rb b/spec/lib/gitlab/database/sharding_key_spec.rb
new file mode 100644
index 00000000000..b47f5ea5df0
--- /dev/null
+++ b/spec/lib/gitlab/database/sharding_key_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
+ # Specific tables can be temporarily exempt from this requirement. You must add an issue link in a comment next to
+ # the table name to remove this once a decision has been made.
+ let(:allowed_to_be_missing_sharding_key) do
+ [
+ 'abuse_report_assignees', # https://gitlab.com/gitlab-org/gitlab/-/issues/432365
+ 'sbom_occurrences_vulnerabilities' # https://gitlab.com/gitlab-org/gitlab/-/issues/432900
+ ]
+ end
+
+ # Specific tables can be temporarily exempt from this requirement. You must add an issue link in a comment next to
+ # the table name to remove this once a decision has been made.
+ let(:allowed_to_be_missing_not_null) do
+ [
+ 'labels.project_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/434356
+ 'labels.group_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/434356
+ ]
+ end
+
+ let(:starting_from_milestone) { 16.6 }
+
+ let(:allowed_sharding_key_referenced_tables) { %w[projects namespaces organizations] }
+
+ it 'requires a sharding_key for all cell-local tables, after milestone 16.6', :aggregate_failures do
+ tables_missing_sharding_key(starting_from_milestone: starting_from_milestone).each do |table_name|
+ expect(allowed_to_be_missing_sharding_key).to include(table_name), error_message(table_name)
+ end
+ end
+
+ it 'ensures all sharding_key columns exist and reference projects, namespaces or organizations',
+ :aggregate_failures do
+ all_tables_to_sharding_key.each do |table_name, sharding_key|
+ sharding_key.each do |column_name, referenced_table_name|
+ expect(column_exists?(table_name, column_name)).to eq(true),
+ "Could not find sharding key column #{table_name}.#{column_name}"
+ expect(referenced_table_name).to be_in(allowed_sharding_key_referenced_tables)
+ end
+ end
+ end
+
+ it 'ensures all sharding_key columns are not nullable or have a not null check constraint',
+ :aggregate_failures do
+ all_tables_to_sharding_key.each do |table_name, sharding_key|
+ sharding_key.each do |column_name, _|
+ not_nullable = not_nullable?(table_name, column_name)
+ has_null_check_constraint = has_null_check_constraint?(table_name, column_name)
+
+ if allowed_to_be_missing_not_null.include?("#{table_name}.#{column_name}")
+ expect(not_nullable || has_null_check_constraint).to eq(false),
+ "You must remove `#{table_name}.#{column_name}` from allowed_to_be_missing_not_null " \
+ "since it now has a valid constraint."
+ else
+ expect(not_nullable || has_null_check_constraint).to eq(true),
+ "Missing a not null constraint for `#{table_name}.#{column_name}`. " \
+ "All sharding keys must be not nullable or have a NOT NULL check constraint"
+ end
+ end
+ end
+ end
+
+ it 'only allows `allowed_to_be_missing_sharding_key` to include tables that are missing a sharding_key',
+ :aggregate_failures do
+ allowed_to_be_missing_sharding_key.each do |exempted_table|
+ expect(tables_missing_sharding_key(starting_from_milestone: starting_from_milestone)).to include(exempted_table),
+ "`#{exempted_table}` is not missing a `sharding_key`. " \
+ "You must remove this table from the `allowed_to_be_missing_sharding_key` list."
+ end
+ end
+
+ private
+
+ def error_message(table_name)
+ <<~HEREDOC
+ The table `#{table_name}` is missing a `sharding_key` in the `db/docs` YML file.
+ Starting from GitLab #{starting_from_milestone}, we expect all new tables to define a `sharding_key`.
+
+ To choose an appropriate sharding_key for this table please refer
+ to our guidelines at https://docs.gitlab.com/ee/development/database/multiple_databases.html#defining-a-sharding-key-for-all-cell-local-tables, or consult with the Tenant Scale group.
+ HEREDOC
+ end
+
+ def tables_missing_sharding_key(starting_from_milestone:)
+ ::Gitlab::Database::Dictionary.entries.select do |entry|
+ entry.sharding_key.blank? &&
+ entry.milestone.to_f >= starting_from_milestone &&
+ ::Gitlab::Database::GitlabSchema.cell_local?(entry.gitlab_schema)
+ end.map(&:table_name)
+ end
+
+ def all_tables_to_sharding_key
+ entries_with_sharding_key = ::Gitlab::Database::Dictionary.entries.select do |entry|
+ entry.sharding_key.present?
+ end
+
+ entries_with_sharding_key.to_h do |entry|
+ [entry.table_name, entry.sharding_key]
+ end
+ end
+
+ def not_nullable?(table_name, column_name)
+ sql = <<~SQL
+ SELECT 1
+ FROM information_schema.columns
+ WHERE table_schema = 'public' AND
+ table_name = '#{table_name}' AND
+ column_name = '#{column_name}' AND
+ is_nullable = 'NO'
+ SQL
+
+ result = ApplicationRecord.connection.execute(sql)
+
+ result.count > 0
+ end
+
+ def has_null_check_constraint?(table_name, column_name)
+ # This is a heuristic query to look for all check constraints on the table and see if any of them contain a clause
+ # column IS NOT NULL. This is to match tables that will have multiple sharding keys where either of them can be not
+ # null. Such cases may look like:
+ # (project_id IS NOT NULL) OR (group_id IS NOT NULL)
+ # It's possible that this will sometimes incorrectly find a check constraint that isn't exactly as strict as we want
+ # but it should be pretty unlikely.
+ sql = <<~SQL
+ SELECT 1
+ FROM pg_constraint
+ INNER JOIN pg_class ON pg_constraint.conrelid = pg_class.oid
+ WHERE pg_class.relname = '#{table_name}'
+ AND contype = 'c'
+ AND pg_get_constraintdef(pg_constraint.oid) ILIKE '%#{column_name} IS NOT NULL%'
+ SQL
+
+ result = ApplicationRecord.connection.execute(sql)
+
+ result.count > 0
+ end
+
+ def column_exists?(table_name, column_name)
+ sql = <<~SQL
+ SELECT 1
+ FROM information_schema.columns
+ WHERE table_schema = 'public' AND
+ table_name = '#{table_name}' AND
+ column_name = '#{column_name}';
+ SQL
+
+ result = ApplicationRecord.connection.execute(sql)
+
+ result.count > 0
+ end
+end
diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb
index 778212add66..2d5a59a2d5d 100644
--- a/spec/lib/gitlab/database/transaction/observer_spec.rb
+++ b/spec/lib/gitlab/database/transaction/observer_spec.rb
@@ -21,6 +21,8 @@ RSpec.describe Gitlab::Database::Transaction::Observer, feature_category: :datab
it 'tracks transaction data', :aggregate_failures do
ActiveRecord::Base.transaction do
+ User.first
+
ActiveRecord::Base.transaction(requires_new: true) do
User.first
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 7e0435c815b..624e2b5c144 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -244,9 +244,9 @@ RSpec.describe Gitlab::Database::WithLockRetries, feature_category: :database do
it 'executes `SET LOCAL lock_timeout` using the configured timeout value in milliseconds' do
expect(connection).to receive(:execute).with("RESET idle_in_transaction_session_timeout; RESET lock_timeout").and_call_original
- expect(connection).to receive(:execute).with("SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:create_savepoint).with('active_record_1')
expect(connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
- expect(connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:release_savepoint).with('active_record_1')
subject.run {}
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index bc4fc49b1b7..2cd27472440 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::File do
include RepoHelpers
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:commit) { project.commit(sample_commit.id) }
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
@@ -51,6 +51,31 @@ RSpec.describe Gitlab::Diff::File do
project.commit(branch_name).diffs.diff_files.first
end
+ describe 'delegated methods' do
+ subject { diff_file }
+
+ %i[
+ new_file?
+ deleted_file?
+ renamed_file?
+ unidiff
+ old_path
+ new_path
+ a_mode
+ b_mode
+ mode_changed?
+ submodule?
+ expanded?
+ too_large?
+ collapsed?
+ line_count
+ has_binary_notice?
+ generated?
+ ].each do |method|
+ it { is_expected.to delegate_method(method).to(:diff) }
+ end
+ end
+
describe '#initialize' do
let(:commit) { project.commit("532c837") }
diff --git a/spec/lib/gitlab/doctor/reset_tokens_spec.rb b/spec/lib/gitlab/doctor/reset_tokens_spec.rb
index 0cc947efdb4..b2155ee83ad 100644
--- a/spec/lib/gitlab/doctor/reset_tokens_spec.rb
+++ b/spec/lib/gitlab/doctor/reset_tokens_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Doctor::ResetTokens, feature_category: :runner_fleet do
+RSpec.describe Gitlab::Doctor::ResetTokens, feature_category: :fleet_visibility do
let(:logger) { instance_double('Logger') }
let(:model_names) { %w[Project Group] }
let(:token_names) { %w[runners_token] }
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index c7b69f39951..30514b531dc 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
+RSpec.describe Gitlab::Email::Handler::CreateNoteHandler, feature_category: :shared do
include_context 'email shared context'
- let_it_be(:user) { create(:user, email: 'jake@adventuretime.ooo') }
- let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be_with_reload(:user) { create(:user, email: 'jake@adventuretime.ooo') }
+ let_it_be(:project) { create(:project, :public, :repository) }
let(:noteable) { note.noteable }
let(:note) { create(:diff_note_on_merge_request, project: project) }
@@ -133,14 +133,16 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
- context 'mail key is in the References header' do
+ context 'when mail key is in the References header' do
let(:email_raw) { fixture_file('emails/reply_without_subaddressing_and_key_inside_references.eml') }
it_behaves_like 'an email that contains a mail key', 'References'
end
- context 'mail key is in the References header with a comma' do
- let(:email_raw) { fixture_file('emails/reply_without_subaddressing_and_key_inside_references_with_a_comma.eml') }
+ context 'when mail key is in the References header with a comma' do
+ let(:email_raw) do
+ fixture_file('emails/reply_without_subaddressing_and_key_inside_references_with_a_comma.eml')
+ end
it_behaves_like 'an email that contains a mail key', 'References'
end
@@ -228,4 +230,110 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
end
+
+ context 'when issue is closed' do
+ let_it_be(:noteable) { create(:issue, :closed, :confidential, project: project) }
+ let_it_be(:note) { create(:note, noteable: noteable, project: project) }
+
+ let!(:sent_notification) do
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(true)
+ SentNotification.record_note(note, Users::Internal.support_bot.id)
+ end
+
+ let(:reply_address) { "support+#{sent_notification.reply_key}@example.com" }
+ let(:reopen_note) { noteable.notes.last }
+ let(:email_raw) do
+ <<~EMAIL
+ From: from@example.com
+ To: #{reply_address}
+ Subject: Issue title
+
+ Issue description
+ EMAIL
+ end
+
+ before do
+ stub_incoming_email_setting(enabled: true, address: 'support+%{key}@example.com')
+ end
+
+ it 'does not reopen issue but adds external participants comment' do
+ # Only 1 from received email
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ expect(noteable).to be_closed
+ end
+
+ context 'when noteable is a commit' do
+ let!(:note) { create(:note_on_commit, project: project) }
+ let!(:noteable) { note.noteable }
+
+ let!(:sent_notification) do
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(true)
+ SentNotification.record_note(note, Users::Internal.support_bot.id)
+ end
+
+ it 'does not reopen issue but adds external participants comment' do
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ end
+ end
+
+ context 'when reopen_issue_on_external_participant_note is true' do
+ shared_examples 'an automatically reopened issue' do
+ it 'reopens issue, adds external participants comment and reopen comment' do
+ # 1 from received email and 1 reopen comment
+ expect { receiver.execute }.to change { noteable.notes.count }.by(2)
+ expect(noteable.reset).to be_open
+
+ expect(reopen_note).to be_confidential
+ expect(reopen_note.author).to eq(Users::Internal.support_bot)
+ expect(reopen_note.note).to include(reopen_comment_body)
+ end
+ end
+
+ let!(:settings) do
+ create(:service_desk_setting, project: project, reopen_issue_on_external_participant_note: true)
+ end
+
+ let(:reopen_comment_body) do
+ s_(
+ "ServiceDesk|This issue has been reopened because it received a new comment from an external participant."
+ )
+ end
+
+ it_behaves_like 'an automatically reopened issue'
+
+ it 'does not contain an assignee mention' do
+ receiver.execute
+ expect(reopen_note.note).not_to include("@")
+ end
+
+ context 'when issue is assigned to a user' do
+ before do
+ noteable.update!(assignees: [user])
+ end
+
+ it_behaves_like 'an automatically reopened issue'
+
+ it 'contains an assignee mention' do
+ receiver.execute
+ expect(reopen_note.note).to include(user.to_reference)
+ end
+ end
+
+ context 'when issue is assigned to multiple users' do
+ let_it_be(:another_user) { create(:user) }
+
+ before do
+ noteable.update!(assignees: [user, another_user])
+ end
+
+ it_behaves_like 'an automatically reopened issue'
+
+ it 'contains two assignee mentions' do
+ receiver.execute
+ expect(reopen_note.note).to include(user.to_reference)
+ expect(reopen_note.note).to include(another_user.to_reference)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index f13fd0be4cd..9d484198cc0 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
let_it_be(:group) { create(:group, :private, :crm_enabled, name: "email") }
+ let(:expected_subject) { "The message subject! @all" }
let(:expected_description) do
"Service desk stuff!\n\n```\na = b\n```\n\n`/label ~label1`\n`/assign @user1`\n`/close`\n![image](uploads/image.png)"
end
@@ -43,7 +44,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
expect(new_issue.author).to eql(Users::Internal.support_bot)
expect(new_issue.confidential?).to be true
expect(new_issue.all_references.all).to be_empty
- expect(new_issue.title).to eq("The message subject! @all")
+ expect(new_issue.title).to eq(expected_subject)
expect(new_issue.description).to eq(expected_description.strip)
expect(new_issue.email&.email_message_id).to eq(message_id)
end
@@ -115,6 +116,40 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
it_behaves_like 'a new issue request'
+ context 'when more than the defined limit of participants are in Cc header' do
+ before do
+ stub_const("IssueEmailParticipants::CreateService::MAX_NUMBER_OF_RECORDS", 6)
+ end
+
+ let(:cc_addresses) { Array.new(6) { |i| "user#{i}@example.com" }.join(', ') }
+ let(:author_email) { 'from@example.com' }
+ let(:expected_subject) { "Issue title" }
+ let(:expected_description) do
+ <<~DESC
+ Issue description
+
+ ![image](uploads/image.png)
+ DESC
+ end
+
+ let(:email_raw) do
+ <<~EMAIL
+ From: #{author_email}
+ To: #{to_address}
+ Cc: #{cc_addresses}
+ Message-ID: <#{message_id}>
+ Subject: #{expected_subject}
+
+ Issue description
+ EMAIL
+ end
+
+ # Author email plus 5 from Cc
+ let(:issue_email_participants_count) { 6 }
+
+ it_behaves_like 'a new issue request'
+ end
+
context 'when no CC header is present' do
let(:email_raw) do
<<~EMAIL
@@ -462,7 +497,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
end
end
- shared_examples 'a handler that does not verify the custom email' do |error_identifier|
+ shared_examples 'a handler that does not verify the custom email' do
it 'does not verify the custom email address' do
# project has no owner, so only notify verification triggerer
expect(Notify).to receive(:service_desk_verification_result_email).once
@@ -477,20 +512,32 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
end
end
- shared_examples 'a handler that verifies Service Desk custom email verification emails' do
+ context 'when using incoming_email address' do
+ before do
+ stub_incoming_email_setting(enabled: true, address: 'support+%{key}@example.com')
+ end
+
it_behaves_like 'an early exiting handler'
context 'with valid service desk settings' do
let_it_be(:user) { create(:user) }
+ let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
- let!(:settings) { create(:service_desk_setting, project: project, custom_email: 'custom-support-email@example.com') }
- let!(:verification) { create(:service_desk_custom_email_verification, project: project, token: 'ZROT4ZZXA-Y6', triggerer: user) }
+ let_it_be_with_reload(:settings) do
+ create(:service_desk_setting, project: project, custom_email: 'custom-support-email@example.com')
+ end
+
+ let_it_be_with_reload(:verification) do
+ create(:service_desk_custom_email_verification, project: project, token: 'ZROT4ZZXA-Y6', triggerer: user)
+ end
let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
- before do
+ before_all do
project.add_maintainer(user)
+ end
+ before do
allow(message_delivery).to receive(:deliver_later)
allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
end
@@ -521,7 +568,9 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
verification.update!(token: 'XXXXXXXXXXXX')
end
- it_behaves_like 'a handler that does not verify the custom email', 'incorrect_token'
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'incorrect_token' }
+ end
end
context 'and verification email ingested too late' do
@@ -529,7 +578,9 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
verification.update!(triggered_at: ServiceDesk::CustomEmailVerification::TIMEFRAME.ago)
end
- it_behaves_like 'a handler that does not verify the custom email', 'mail_not_received_within_timeframe'
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'mail_not_received_within_timeframe' }
+ end
end
context 'and from header differs from custom email address' do
@@ -537,29 +588,13 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
settings.update!(custom_email: 'different-from@example.com')
end
- it_behaves_like 'a handler that does not verify the custom email', 'incorrect_from'
- end
- end
-
- context 'when service_desk_custom_email feature flag is disabled' do
- before do
- stub_feature_flags(service_desk_custom_email: false)
- end
-
- it 'does not trigger the verification process and adds an issue instead' do
- expect { receiver.execute }.to change { Issue.count }.by(1)
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'incorrect_from' }
+ end
end
end
end
- context 'when using incoming_email address' do
- before do
- stub_incoming_email_setting(enabled: true, address: 'support+%{key}@example.com')
- end
-
- it_behaves_like 'a handler that verifies Service Desk custom email verification emails'
- end
-
context 'when using service_desk_email address' do
let(:receiver) { Gitlab::Email::ServiceDeskReceiver.new(email_raw) }
@@ -567,7 +602,35 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se
stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
end
- it_behaves_like 'a handler that verifies Service Desk custom email verification emails'
+ it_behaves_like 'an early exiting handler'
+
+ context 'with valid service desk settings' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:credentials) { create(:service_desk_custom_email_credential, project: project) }
+
+ let_it_be_with_reload(:settings) do
+ create(:service_desk_setting, project: project, custom_email: 'custom-support-email@example.com')
+ end
+
+ let_it_be_with_reload(:verification) do
+ create(:service_desk_custom_email_verification, project: project, token: 'ZROT4ZZXA-Y6', triggerer: user)
+ end
+
+ let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ before do
+ allow(message_delivery).to receive(:deliver_later)
+ allow(Notify).to receive(:service_desk_verification_result_email).and_return(message_delivery)
+ end
+
+ it_behaves_like 'a handler that does not verify the custom email' do
+ let(:error_identifier) { 'incorrect_forwarding_target' }
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index c86a83092a4..aff5928c3da 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe Gitlab::Email::Receiver do
+RSpec.describe Gitlab::Email::Receiver, feature_category: :shared do
include_context 'email shared context'
- let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:project) { create(:project) }
let(:metric_transaction) { instance_double(Gitlab::Metrics::WebTransaction) }
shared_examples 'successful receive' do
@@ -130,6 +130,63 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'successful receive'
end
+
+ context 'when Service Desk custom email reply address in To header and no References header exists' do
+ let_it_be_with_refind(:setting) { create(:service_desk_setting, project: project, add_external_participants_from_cc: true) }
+
+ let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let!(:verification) { create(:service_desk_custom_email_verification, :finished, project: project) }
+ let(:incoming_email) { "incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com" }
+ let(:reply_key) { "5de1a83a6fc3c9fe34d756c7f484159e" }
+ let(:custom_email_reply) { "support+#{reply_key}@example.com" }
+
+ context 'when custom email is enabled' do
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: #{incoming_email}
+ From: jake@example.com
+ To: #{custom_email_reply}
+ Subject: Reply titile
+
+ Reply body
+ EMAIL
+ end
+
+ let(:meta_key) { :to_address }
+ let(:meta_value) { [custom_email_reply] }
+
+ before do
+ project.reset
+ setting.update!(custom_email: 'support@example.com', custom_email_enabled: true)
+ end
+
+ it_behaves_like 'successful receive' do
+ let(:mail_key) { reply_key }
+ end
+
+ # Email forwarding using a transport rule in Microsoft 365 adds the forwarding
+ # target to the `To` header. We have to select the custom email reply address
+ # before the incoming address (forwarding target)
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/426269#note_1629170865 for email structure
+ context 'when also Service Desk incoming address in To header' do
+ let(:email_raw) do
+ <<~EMAIL
+ From: jake@example.com
+ To: #{custom_email_reply}, #{incoming_email}
+ Subject: Reply titile
+
+ Reply body
+ EMAIL
+ end
+
+ let(:meta_value) { [custom_email_reply, incoming_email] }
+
+ it_behaves_like 'successful receive' do
+ let(:mail_key) { reply_key }
+ end
+ end
+ end
+ end
end
context 'when we cannot find a capable handler' do
diff --git a/spec/lib/gitlab/email/service_desk/custom_email_spec.rb b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
index bba1ca1c8be..bdf31567251 100644
--- a/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
+++ b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb
@@ -6,10 +6,9 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
let(:reply_key) { 'b7721fc7e8419911a8bea145236a0519' }
let(:custom_email) { 'support@example.com' }
let(:email_with_reply_key) { 'support+b7721fc7e8419911a8bea145236a0519@example.com' }
+ let_it_be(:project) { create(:project) }
describe '.reply_address' do
- let_it_be(:project) { create(:project) }
-
subject(:reply_address) { described_class.reply_address(nil, nil) }
it { is_expected.to be nil }
@@ -34,4 +33,38 @@ RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :servi
end
end
end
+
+ describe '.key_from_reply_address' do
+ let(:email) { email_with_reply_key }
+
+ subject(:reply_address) { described_class.key_from_reply_address(email) }
+
+ it { is_expected.to be nil }
+
+ context 'with service_desk_setting' do
+ let_it_be_with_refind(:setting) do
+ create(:service_desk_setting, project: project, add_external_participants_from_cc: true)
+ end
+
+ it { is_expected.to be nil }
+
+ context 'with custom email' do
+ let!(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let!(:verification) { create(:service_desk_custom_email_verification, :finished, project: project) }
+
+ before do
+ project.reset
+ setting.update!(custom_email: 'support@example.com', custom_email_enabled: true)
+ end
+
+ it { is_expected.to eq reply_key }
+ end
+ end
+
+ context 'without reply key' do
+ let(:email) { custom_email }
+
+ it { is_expected.to be nil }
+ end
+ end
end
diff --git a/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb b/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
index 38745fe0cde..932c1b2fb4c 100644
--- a/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
+++ b/spec/lib/gitlab/error_tracking/context_payload_generator_spec.rb
@@ -64,8 +64,11 @@ RSpec.describe Gitlab::ErrorTracking::ContextPayloadGenerator do
end
context 'when the GITLAB_SENTRY_EXTRA_TAGS env is a JSON hash' do
- it 'includes those tags in all events' do
+ before do
stub_env('GITLAB_SENTRY_EXTRA_TAGS', { foo: 'bar', baz: 'quux' }.to_json)
+ end
+
+ it 'includes those tags in all events' do
payload = {}
Gitlab::ApplicationContext.with_context(feature_category: 'feature_a') do
@@ -87,6 +90,26 @@ RSpec.describe Gitlab::ErrorTracking::ContextPayloadGenerator do
generator.generate(exception, extra)
end
+
+ context 'with generated tags' do
+ it 'includes all tags' do
+ payload = {}
+
+ Gitlab::ApplicationContext.with_context(feature_category: 'feature_a') do
+ payload = generator.generate(exception, extra, { 'mytag' => '123' })
+ end
+
+ expect(payload[:tags]).to eql(
+ correlation_id: 'cid',
+ locale: 'en',
+ program: 'test',
+ feature_category: 'feature_a',
+ 'foo' => 'bar',
+ 'baz' => 'quux',
+ 'mytag' => '123'
+ )
+ end
+ end
end
context 'when the GITLAB_SENTRY_EXTRA_TAGS env is not a JSON hash' do
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 79016335a40..c9b2e21d934 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -97,6 +97,27 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
)
end.to raise_error(RuntimeError, /boom/)
end
+
+ context 'with tags' do
+ let(:tags) { { 'mytag' => 2 } }
+
+ before do
+ sentry_payload[:tags].merge!(tags)
+ end
+
+ it 'includes additional tags' do
+ expect(Raven).to receive(:capture_exception).with(exception, sentry_payload)
+ expect(Sentry).to receive(:capture_exception).with(exception, sentry_payload)
+
+ expect do
+ described_class.track_and_raise_for_dev_exception(
+ exception,
+ { issue_url: issue_url, some_other_info: 'info' },
+ tags
+ )
+ end.to raise_error(RuntimeError, /boom/)
+ end
+ end
end
context 'when exceptions for dev should not be raised' do
@@ -181,8 +202,10 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
end
describe '.track_exception' do
+ let(:tags) { {} }
+
subject(:track_exception) do
- described_class.track_exception(exception, extra)
+ described_class.track_exception(exception, extra, tags)
end
before do
@@ -207,6 +230,18 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(logger_payload)
end
+ context 'with tags' do
+ let(:tags) { { 'mytag' => 2 } }
+
+ it 'includes the tags' do
+ track_exception
+
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
+ hash_including({ 'tags.mytag' => 2 })
+ )
+ end
+ end
+
context 'with filterable parameters' do
let(:extra) { { test: 1, my_token: 'test' } }
diff --git a/spec/lib/gitlab/event_store/event_spec.rb b/spec/lib/gitlab/event_store/event_spec.rb
index 97f6870a5ec..edcb0e5dd1a 100644
--- a/spec/lib/gitlab/event_store/event_spec.rb
+++ b/spec/lib/gitlab/event_store/event_spec.rb
@@ -1,8 +1,10 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'json_schemer'
+require 'oj'
-RSpec.describe Gitlab::EventStore::Event do
+RSpec.describe Gitlab::EventStore::Event, feature_category: :shared do
let(:event_class) { stub_const('TestEvent', Class.new(described_class)) }
let(:event) { event_class.new(data: data) }
let(:data) { { project_id: 123, project_path: 'org/the-project' } }
@@ -42,6 +44,14 @@ RSpec.describe Gitlab::EventStore::Event do
it 'initializes the event correctly' do
expect(event.data).to eq(data)
end
+
+ it 'validates schema' do
+ expect(event_class.json_schema_valid).to eq(nil)
+
+ event
+
+ expect(event_class.json_schema_valid).to eq(true)
+ end
end
context 'when some properties are missing' do
@@ -59,6 +69,31 @@ RSpec.describe Gitlab::EventStore::Event do
expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent, 'Event data must be a Hash')
end
end
+
+ context 'when schema is invalid' do
+ before do
+ event_class.class_eval do
+ def schema
+ {
+ 'required' => ['project_id'],
+ 'type' => 'object',
+ 'properties' => {
+ 'project_id' => { 'type' => 'int' },
+ 'project_path' => { 'type' => 'string ' }
+ }
+ }
+ end
+ end
+ end
+
+ it 'raises an error' do
+ expect(event_class.json_schema_valid).to eq(nil)
+
+ expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent, 'Schema for event TestEvent is invalid')
+
+ expect(event_class.json_schema_valid).to eq(false)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/event_store/store_spec.rb b/spec/lib/gitlab/event_store/store_spec.rb
index 04d0706c130..e747027db98 100644
--- a/spec/lib/gitlab/event_store/store_spec.rb
+++ b/spec/lib/gitlab/event_store/store_spec.rb
@@ -263,12 +263,59 @@ RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do
end
end
+ describe '#publish_group' do
+ let(:event1) { event_klass.new(data: { name: 'Bob', id: 123 }) }
+ let(:event2) { event_klass.new(data: { name: 'Alice', id: 456 }) }
+ let(:event3) { event_klass.new(data: { name: 'Eva', id: 789 }) }
+
+ let(:group_size) { 3 }
+ let(:events) { [event1, event2, event3] }
+ let(:serialized_data) { events.map(&:data).map(&:deep_stringify_keys) }
+
+ let(:store) do
+ described_class.new do |s|
+ s.subscribe worker, to: event_klass, group_size: group_size
+ end
+ end
+
+ subject { store.publish_group(events) }
+
+ context 'with valid events' do
+ it 'calls consume_events of subscription' do
+ expect(store.subscriptions[event_klass].first).to receive(:consume_events).with(events)
+
+ subject
+ end
+ end
+
+ context 'when there is invalid event' do
+ let(:events) { [event1, invalid_event] }
+
+ context 'when event is invalid' do
+ let(:invalid_event) { stub_const('TestEvent', {}) }
+
+ it 'raises InvalidEvent error' do
+ expect { subject }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+
+ context 'when one of the events is a different event' do
+ let(:invalid_event) { stub_const('DifferentEvent', Class.new(Gitlab::EventStore::Event)) }
+
+ it 'raises InvalidEvent error' do
+ expect { subject }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+ end
+
describe 'subscriber' do
let(:data) { { name: 'Bob', id: 123 } }
+ let(:event_data) { data }
let(:event_name) { event.class.name }
let(:worker_instance) { worker.new }
- subject { worker_instance.perform(event_name, data) }
+ subject { worker_instance.perform(event_name, event_data) }
it 'is a Sidekiq worker' do
expect(worker_instance).to be_a(ApplicationWorker)
@@ -278,7 +325,7 @@ RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do
expect(worker_instance).to receive(:handle_event).with(instance_of(event.class))
expect_any_instance_of(event.class) do |event|
- expect(event).to receive(:data).and_return(data)
+ expect(event).to receive(:data).and_return(event_data)
end
subject
@@ -299,5 +346,24 @@ RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do
expect { subject }.to raise_error(NotImplementedError)
end
end
+
+ context 'when there are multiple events' do
+ let(:event_data) { [{ name: 'Bob', id: 123 }, { name: 'Alice', id: 456 }] }
+
+ let(:first_event) { event_klass.new(data: event_data.first) }
+ let(:second_event) { event_klass.new(data: event_data.last) }
+
+ before do
+ allow(worker_instance).to receive(:construct_event).with(event_klass, event_data.first).and_return(first_event)
+ allow(worker_instance).to receive(:construct_event).with(event_klass, event_data.last).and_return(second_event)
+ end
+
+ it 'calls handle_event multiple times' do
+ expect(worker_instance).to receive(:handle_event).once.with(first_event)
+ expect(worker_instance).to receive(:handle_event).once.with(second_event)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/event_store/subscription_spec.rb b/spec/lib/gitlab/event_store/subscription_spec.rb
new file mode 100644
index 00000000000..2a87f48be10
--- /dev/null
+++ b/spec/lib/gitlab/event_store/subscription_spec.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EventStore::Subscription, feature_category: :shared do
+ let(:worker) do
+ stub_const('EventSubscriber', Class.new).tap do |klass|
+ klass.class_eval do
+ include Gitlab::EventStore::Subscriber
+
+ def handle_event(event)
+ event.data
+ end
+ end
+ end
+ end
+
+ let(:event_klass) { stub_const('TestEvent', Class.new(Gitlab::EventStore::Event)) }
+ let(:event) { event_klass.new(data: data) }
+
+ let(:delay) { nil }
+ let(:condition) { nil }
+ let(:group_size) { nil }
+
+ subject(:subscription) { described_class.new(worker, condition, delay, group_size) }
+
+ before do
+ event_klass.class_eval do
+ def schema
+ {
+ 'required' => %w[name id],
+ 'type' => 'object',
+ 'properties' => {
+ 'name' => { 'type' => 'string' },
+ 'id' => { 'type' => 'integer' }
+ }
+ }
+ end
+ end
+ end
+
+ describe '#consume_events' do
+ let(:event1) { event_klass.new(data: { name: 'Bob', id: 123 }) }
+ let(:event2) { event_klass.new(data: { name: 'Alice', id: 456 }) }
+ let(:event3) { event_klass.new(data: { name: 'Eva', id: 789 }) }
+
+ let(:group_size) { 3 }
+ let(:events) { [event1, event2, event3] }
+ let(:serialized_data) { events.map(&:data).map(&:deep_stringify_keys) }
+
+ subject(:consume_events) { subscription.consume_events(events) }
+
+ context 'with invalid events' do
+ let(:events) { [event1, invalid_event] }
+
+ context 'when event is invalid' do
+ let(:invalid_event) { stub_const('TestEvent', Class.new { attr_reader :data }).new }
+
+ it 'raises InvalidEvent error' do
+ expect { consume_events }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+
+ context 'when one of the events is a different event' do
+ let(:invalid_event_klass) { stub_const('DifferentEvent', Class.new(Gitlab::EventStore::Event)) }
+ let(:invalid_event) { invalid_event_klass.new(data: {}) }
+
+ before do
+ invalid_event_klass.class_eval do
+ def schema
+ {
+ 'type' => 'object',
+ 'properties' => {}
+ }
+ end
+ end
+ end
+
+ it 'raises InvalidEvent error' do
+ expect { consume_events }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+
+ context 'when grouped events size is more than batch scheduling size' do
+ let(:group_size) { 2 }
+
+ before do
+ stub_const("#{described_class}::SCHEDULING_BATCH_SIZE", 1)
+ end
+
+ it 'dispatches the events to the worker with batch parameters' do
+ expect(worker).to receive(:bulk_perform_in).with(
+ 1.second,
+ [['TestEvent', serialized_data.take(2)], ['TestEvent', serialized_data.drop(2)]],
+ batch_size: 1,
+ batch_delay: 10.seconds
+ )
+
+ consume_events
+ end
+
+ context 'with delayed dispatching of event' do
+ let(:delay) { 1.minute }
+
+ it 'dispatches the events to the worker with batch parameters and delay' do
+ expect(worker).to receive(:bulk_perform_in).with(
+ 1.minute,
+ [['TestEvent', serialized_data.take(2)], ['TestEvent', serialized_data.drop(2)]],
+ batch_size: 1,
+ batch_delay: 10.seconds
+ )
+
+ consume_events
+ end
+ end
+ end
+
+ context 'when subscription has grouped dispatching of events' do
+ let(:group_size) { 2 }
+
+ it 'dispatches the events to the worker in group' do
+ expect(worker).to receive(:bulk_perform_async).once.with([
+ ['TestEvent', serialized_data.take(2)],
+ ['TestEvent', serialized_data.drop(2)]
+ ])
+
+ consume_events
+ end
+ end
+
+ context 'when subscription has delayed dispatching of event' do
+ let(:delay) { 1.minute }
+
+ it 'dispatches the events to the worker after some time' do
+ expect(worker).to receive(:bulk_perform_in).with(1.minute, [['TestEvent', serialized_data]])
+
+ consume_events
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/exclusive_lease_spec.rb b/spec/lib/gitlab/exclusive_lease_spec.rb
index 80154c729e3..a02e2625c5e 100644
--- a/spec/lib/gitlab/exclusive_lease_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::ExclusiveLease, :request_store,
- :clean_gitlab_redis_cluster_shared_state, feature_category: :shared do
+ :clean_gitlab_redis_shared_state, feature_category: :shared do
let(:unique_key) { SecureRandom.hex(10) }
describe '#try_obtain' do
diff --git a/spec/lib/gitlab/experiment/rollout/feature_spec.rb b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
index 6d01b7a175f..9d602083ad6 100644
--- a/spec/lib/gitlab/experiment/rollout/feature_spec.rb
+++ b/spec/lib/gitlab/experiment/rollout/feature_spec.rb
@@ -3,50 +3,25 @@
require 'spec_helper'
RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_category: :acquisition do
- subject { described_class.new(subject_experiment) }
+ subject(:experiment_instance) { described_class.new(subject_experiment) }
let(:subject_experiment) { experiment('namespaced/stub') }
- describe "#enabled?", :saas do
+ describe "#enabled?" do
before do
stub_feature_flags(gitlab_experiment: true)
- allow(subject).to receive(:feature_flag_defined?).and_return(true)
- allow(subject).to receive(:feature_flag_instance).and_return(double(state: :on))
+ allow(experiment_instance).to receive(:feature_flag_defined?).and_return(true)
+ allow(experiment_instance)
+ .to receive(:feature_flag_instance).and_return(instance_double('Flipper::Feature', state: :on))
end
- it "is enabled when all criteria are met" do
- expect(subject).to be_enabled
- end
-
- it "isn't enabled if the feature definition doesn't exist" do
- expect(subject).to receive(:feature_flag_defined?).and_return(false)
-
- expect(subject).not_to be_enabled
- end
-
- it "isn't enabled if we're not in dev or dotcom environments" do
- expect(Gitlab).to receive(:com?).and_return(false)
-
- expect(subject).not_to be_enabled
- end
-
- it "isn't enabled if the feature flag state is :off" do
- expect(subject).to receive(:feature_flag_instance).and_return(double(state: :off))
-
- expect(subject).not_to be_enabled
- end
-
- it "isn't enabled if the gitlab_experiment feature flag is false" do
- stub_feature_flags(gitlab_experiment: false)
-
- expect(subject).not_to be_enabled
- end
+ it { is_expected.not_to be_enabled }
end
describe "#execute_assignment" do
let(:variants) do
->(e) do
- # rubocop:disable Lint/EmptyBlock
+ # rubocop:disable Lint/EmptyBlock -- Specific for test
e.control {}
e.variant(:red) {}
e.variant(:blue) {}
@@ -63,26 +38,26 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
it "uses the default value as specified in the yaml" do
expect(Feature).to receive(:enabled?).with(
'namespaced_stub',
- subject,
+ experiment_instance,
type: :experiment
).and_return(false)
- expect(subject.execute_assignment).to be_nil
+ expect(experiment_instance.execute_assignment).to be_nil
end
it "returns an assigned name" do
- expect(subject.execute_assignment).to eq(:blue)
+ expect(experiment_instance.execute_assignment).to eq(:blue)
end
context "when there are no behaviors" do
- let(:variants) { ->(e) { e.control {} } } # rubocop:disable Lint/EmptyBlock
+ let(:variants) { ->(e) { e.control {} } } # rubocop:disable Lint/EmptyBlock -- Specific for test
it "does not raise an error" do
- expect { subject.execute_assignment }.not_to raise_error
+ expect { experiment_instance.execute_assignment }.not_to raise_error
end
end
- context "for even rollout to non-control", :saas do
+ context "for even rollout to non-control" do
let(:counts) { Hash.new(0) }
let(:subject_experiment) { experiment('namespaced/stub') }
@@ -91,8 +66,8 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
allow(instance).to receive(:enabled?).and_return(true)
end
- subject_experiment.variant(:variant1) {} # rubocop:disable Lint/EmptyBlock
- subject_experiment.variant(:variant2) {} # rubocop:disable Lint/EmptyBlock
+ subject_experiment.variant(:variant1) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
+ subject_experiment.variant(:variant2) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
end
it "rolls out relatively evenly to 2 behaviors" do
@@ -102,7 +77,7 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
end
it "rolls out relatively evenly to 3 behaviors" do
- subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock
+ subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
100.times { |i| run_cycle(subject_experiment, value: i) }
@@ -115,7 +90,7 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
end
it "rolls out with the expected distribution" do
- subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock
+ subject_experiment.variant(:variant3) {} # rubocop:disable Lint/EmptyBlock -- Specific for test
100.times { |i| run_cycle(subject_experiment, value: i) }
@@ -152,14 +127,14 @@ RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment, feature_catego
describe "#flipper_id" do
it "returns the expected flipper id if the experiment doesn't provide one" do
- subject.instance_variable_set(:@experiment, double(id: '__id__'))
- expect(subject.flipper_id).to eq('Experiment;__id__')
+ experiment_instance.instance_variable_set(:@experiment, instance_double('Gitlab::Experiment', id: '__id__'))
+ expect(experiment_instance.flipper_id).to eq('Experiment;__id__')
end
it "lets the experiment provide a flipper id so it can override the default" do
allow(subject_experiment).to receive(:flipper_id).and_return('_my_overridden_id_')
- expect(subject.flipper_id).to eq('_my_overridden_id_')
+ expect(experiment_instance.flipper_id).to eq('_my_overridden_id_')
end
end
end
diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb
index 55bb1804d86..eabc92b794a 100644
--- a/spec/lib/gitlab/file_detector_spec.rb
+++ b/spec/lib/gitlab/file_detector_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::FileDetector do
+RSpec.describe Gitlab::FileDetector, feature_category: :global_search do
describe '.types_in_paths' do
it 'returns the file types for the given paths' do
expect(described_class.types_in_paths(%w[README.md CHANGELOG VERSION VERSION]))
@@ -116,5 +116,9 @@ RSpec.describe Gitlab::FileDetector do
expect(described_class.type_of(type_name)).to be_nil
end
end
+
+ it 'returns the type of a Jenkins config file' do
+ expect(described_class.type_of('jenkinsfile')).to eq(:jenkinsfile)
+ end
end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index d8d62ac9670..6c8634281ae 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
describe "Commit info from gitaly commit" do
let(:subject) { (+"My commit").force_encoding('ASCII-8BIT') }
- let(:body) { subject + (+"My body").force_encoding('ASCII-8BIT') }
let(:body_size) { body.length }
let(:gitaly_commit) { build(:gitaly_commit, subject: subject, body: body, body_size: body_size, tree_id: tree_id) }
let(:id) { gitaly_commit.id }
@@ -17,6 +16,17 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
let(:author) { gitaly_commit.author }
let(:commit) { described_class.new(repository, gitaly_commit) }
+ let(:body) do
+ body = +<<~BODY
+ Bleep bloop.
+
+ Cc: John Doe <johndoe@gitlab.com>
+ Cc: Jane Doe <janedoe@gitlab.com>
+ BODY
+
+ [subject, "\n", body].join.force_encoding("ASCII-8BIT")
+ end
+
it { expect(commit.short_id).to eq(id[0..10]) }
it { expect(commit.id).to eq(id) }
it { expect(commit.sha).to eq(id) }
@@ -29,6 +39,18 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
it { expect(commit.parent_ids).to eq(gitaly_commit.parent_ids) }
it { expect(commit.tree_id).to eq(tree_id) }
+ it "parses the commit trailers" do
+ expect(commit.trailers).to eq(
+ { "Cc" => "Jane Doe <janedoe@gitlab.com>" }
+ )
+ end
+
+ it "parses the extended commit trailers" do
+ expect(commit.extended_trailers).to eq(
+ { "Cc" => ["John Doe <johndoe@gitlab.com>", "Jane Doe <janedoe@gitlab.com>"] }
+ )
+ end
+
context 'non-UTC dates' do
let(:seconds) { Time.now.to_i }
@@ -773,6 +795,7 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
message: "tree css fixes",
parent_ids: ["874797c3a73b60d2187ed6e2fcabd289ff75171e"],
trailers: {},
+ extended_trailers: {},
referenced_by: []
}
end
diff --git a/spec/lib/gitlab/git/compare_spec.rb b/spec/lib/gitlab/git/compare_spec.rb
index 81b5aa94656..5ee5e18d5af 100644
--- a/spec/lib/gitlab/git/compare_spec.rb
+++ b/spec/lib/gitlab/git/compare_spec.rb
@@ -2,11 +2,14 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Compare do
- let_it_be(:repository) { create(:project, :repository).repository.raw }
+RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
- let(:compare) { described_class.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: false) }
- let(:compare_straight) { described_class.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: true) }
+ let(:compare) { described_class.new(repository, base, head, straight: false) }
+ let(:compare_straight) { described_class.new(repository, base, head, straight: true) }
+ let(:base) { SeedRepo::BigCommit::ID }
+ let(:head) { SeedRepo::Commit::ID }
describe '#commits' do
subject do
@@ -109,4 +112,103 @@ RSpec.describe Gitlab::Git::Compare do
it { is_expected.to include('files/ruby/popen.rb') }
it { is_expected.not_to include('LICENSE') }
end
+
+ describe '#generated_files' do
+ subject(:generated_files) { compare.generated_files }
+
+ context 'with a detected generated file' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
+ let_it_be(:branch) { 'generated-file-test' }
+ let_it_be(:base) do
+ project
+ .repository
+ .create_file(
+ project.creator,
+ '.gitattributes',
+ "*.txt gitlab-generated\n",
+ branch_name: branch,
+ message: 'Add .gitattributes file')
+ end
+
+ let_it_be(:head) do
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.rb',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file1')
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.txt',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file2')
+ end
+
+ it 'sets the diff as generated' do
+ expect(generated_files).to eq Set.new(['file1.txt'])
+ end
+
+ context 'when base is nil' do
+ let(:base) { nil }
+
+ it 'does not try to detect generated files' do
+ expect(repository).not_to receive(:detect_generated_files)
+ expect(repository).not_to receive(:find_changed_paths)
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ context 'when head is nil' do
+ let(:head) { nil }
+
+ it 'does not try to detect generated files' do
+ expect(repository).not_to receive(:detect_generated_files)
+ expect(repository).not_to receive(:find_changed_paths)
+ expect(generated_files).to eq Set.new
+ end
+ end
+ end
+
+ context 'with updated .gitattributes in the HEAD' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
+ let_it_be(:branch) { 'generated-file-test' }
+ let_it_be(:head) do
+ project
+ .repository
+ .create_file(
+ project.creator,
+ '.gitattributes',
+ "*.txt gitlab-generated\n",
+ branch_name: branch,
+ message: 'Add .gitattributes file')
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.rb',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file1')
+ project
+ .repository
+ .create_file(
+ project.creator,
+ 'file1.txt',
+ "some content\n",
+ branch_name: branch,
+ message: 'Add file2')
+ end
+
+ it 'does not set any files as generated' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 72ddd0759ec..dc60d486f49 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Git::DiffCollection do
+RSpec.describe Gitlab::Git::DiffCollection, feature_category: :source_code_management do
before do
stub_const('MutatingConstantIterator', Class.new)
@@ -531,6 +531,99 @@ RSpec.describe Gitlab::Git::DiffCollection do
end
describe '#each' do
+ context 'with Gitlab::GitalyClient::DiffStitcher' do
+ let(:collection) do
+ described_class.new(
+ iterator,
+ max_files: max_files,
+ max_lines: max_lines,
+ limits: limits,
+ expanded: expanded,
+ generated_files: generated_files
+ )
+ end
+
+ let(:iterator) { Gitlab::GitalyClient::DiffStitcher.new(diff_params) }
+ let(:diff_params) { [diff_1, diff_2] }
+ let(:diff_1) do
+ OpenStruct.new(
+ to_path: ".gitmodules",
+ from_path: ".gitmodules",
+ old_mode: 0100644,
+ new_mode: 0100644,
+ from_id: '357406f3075a57708d0163752905cc1576fceacc',
+ to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0',
+ patch: 'a' * 10,
+ raw_patch_data: 'a' * 10,
+ end_of_patch: true
+ )
+ end
+
+ let(:diff_2) do
+ OpenStruct.new(
+ to_path: ".gitignore",
+ from_path: ".gitignore",
+ old_mode: 0100644,
+ new_mode: 0100644,
+ from_id: '357406f3075a57708d0163752905cc1576fceacc',
+ to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0',
+ patch: 'a' * 20,
+ raw_patch_data: 'a' * 20,
+ end_of_patch: true
+ )
+ end
+
+ context 'with generated_files' do
+ let(:generated_files) { [diff_1.from_path] }
+
+ it 'sets generated files as generated' do
+ collection.each do |d|
+ if d.old_path == diff_1.from_path
+ expect(d.generated).to be true
+ else
+ expect(d.generated).to be false
+ end
+ end
+ end
+ end
+
+ context 'without generated_files' do
+ let(:generated_files) { nil }
+
+ it 'set generated as nil' do
+ collection.each do |d|
+ expect(d.generated).to be_nil
+ end
+ end
+ end
+ end
+
+ context 'with existing generated value in the hash' do
+ let(:collection) do
+ described_class.new([{ diff: 'some content', generated: true }], options)
+ end
+
+ context 'when collapse_generated on' do
+ let(:options) { { collapse_generated: true } }
+
+ it 'sets the diff as generated' do
+ collection.each do |diff|
+ expect(diff.generated).to eq true
+ end
+ end
+ end
+
+ context 'when collapse_generated off' do
+ let(:options) { { collapse_generated: false } }
+
+ it 'does not set the diff as generated' do
+ collection.each do |diff|
+ expect(diff.generated).to be_nil
+ end
+ end
+ end
+ end
+
context 'when diff are too large' do
let(:collection) do
described_class.new([{ diff: 'a' * 204800 }])
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 6b3630d7a1f..c40445433c0 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -50,7 +50,7 @@ EOT
let(:diff) { described_class.new(@raw_diff_hash) }
it 'initializes the diff' do
- expect(diff.to_hash).to eq(@raw_diff_hash)
+ expect(diff.to_hash).to eq(@raw_diff_hash.merge(generated: nil))
end
it 'does not prune the diff' do
@@ -87,7 +87,7 @@ EOT
let(:raw_patch) { @raw_diff_hash[:diff] }
it 'initializes the diff' do
- expect(diff.to_hash).to eq(@raw_diff_hash)
+ expect(diff.to_hash).to eq(@raw_diff_hash.merge(generated: nil))
end
it 'does not prune the diff' do
@@ -156,6 +156,31 @@ EOT
expect(diff).to be_collapsed
end
end
+
+ context 'when the file is set as generated' do
+ let(:diff) { described_class.new(gitaly_diff, generated: true, expanded: expanded) }
+ let(:raw_patch) { 'some text' }
+
+ context 'when expanded is set to false' do
+ let(:expanded) { false }
+
+ it 'will be marked as generated and collapsed' do
+ expect(diff).to be_generated
+ expect(diff).to be_collapsed
+ expect(diff.diff).to be_empty
+ end
+ end
+
+ context 'when expanded is set to true' do
+ let(:expanded) { true }
+
+ it 'will still be marked as generated, but not as collapsed' do
+ expect(diff).to be_generated
+ expect(diff).not_to be_collapsed
+ expect(diff.diff).not_to be_empty
+ end
+ end
+ end
end
context 'using a Gitaly::CommitDelta' do
@@ -173,7 +198,7 @@ EOT
let(:diff) { described_class.new(commit_delta) }
it 'initializes the diff' do
- expect(diff.to_hash).to eq(@raw_diff_hash.merge(diff: ''))
+ expect(diff.to_hash).to eq(@raw_diff_hash.merge(diff: '', generated: nil))
end
it 'is not too large' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 5791d9c524f..cc07a16d362 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2670,10 +2670,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
subject { new_repository.replicate(repository) }
before do
- stub_storage_settings('test_second_storage' => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings('test_second_storage' => {})
end
after do
@@ -2781,6 +2778,31 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
+ describe '#object_format' do
+ subject { repository.object_format }
+
+ context 'for SHA1 repository' do
+ it { is_expected.to eq :OBJECT_FORMAT_SHA1 }
+ end
+
+ context 'for SHA256 repository' do
+ let(:project) { create(:project, :empty_repo, object_format: Repository::FORMAT_SHA256) }
+ let(:repository) { project.repository.raw }
+
+ it { is_expected.to eq :OBJECT_FORMAT_SHA256 }
+ end
+
+ context 'for removed repository' do
+ let(:repository) { mutable_repository }
+
+ before do
+ repository.remove
+ end
+
+ it { expect { subject }.to raise_error(Gitlab::Git::Repository::NoRepository) }
+ end
+ end
+
describe '#get_file_attributes' do
let(:rev) { 'master' }
let(:paths) { ['file.txt'] }
@@ -2790,4 +2812,69 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
subject { repository.get_file_attributes(rev, paths, attrs) }
end
end
+
+ describe '#detect_generated_files' do
+ let(:project) do
+ create(:project, :custom_repo, files: {
+ '.gitattributes' => gitattr_content,
+ 'file1.txt' => 'first file',
+ 'file2.txt' => 'second file'
+ })
+ end
+
+ let(:repository) { project.repository.raw }
+ let(:rev) { 'master' }
+ let(:paths) { ['file1.txt', 'file2.txt'] }
+
+ subject(:generated_files) { repository.detect_generated_files(rev, paths) }
+
+ context 'when the linguist-generated attribute is used' do
+ let(:gitattr_content) { "*.txt text\nfile1.txt linguist-generated\n" }
+
+ it 'returns generated files only' do
+ expect(generated_files).to contain_exactly('file1.txt')
+ end
+ end
+
+ context 'when the gitlab-generated attribute is used' do
+ let(:gitattr_content) { "*.txt text\nfile1.txt gitlab-generated\n" }
+
+ it 'returns generated files only' do
+ expect(generated_files).to contain_exactly('file1.txt')
+ end
+ end
+
+ context 'when both linguist-generated and gitlab-generated attribute are used' do
+ let(:gitattr_content) { "*.txt text\nfile1.txt linguist-generated gitlab-generated\n" }
+
+ it 'returns generated files only' do
+ expect(generated_files).to contain_exactly('file1.txt')
+ end
+ end
+
+ context 'when the all files are generated' do
+ let(:gitattr_content) { "*.txt gitlab-generated\n" }
+
+ it 'returns all generated files' do
+ expect(generated_files).to eq paths.to_set
+ end
+ end
+
+ context 'when empty paths are given' do
+ let(:paths) { [] }
+ let(:gitattr_content) { "*.txt gitlab-generated\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ context 'when no generated overrides are used' do
+ let(:gitattr_content) { "*.txt text\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
index bdc16f16e66..ddd63159a03 100644
--- a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
@@ -17,26 +17,11 @@ RSpec.describe Gitlab::GitalyClient::ConflictsService do
describe '#conflicts' do
subject(:conflicts) { client.conflicts? }
- context "with the `skip_conflict_files_in_gitaly` feature flag on" do
- it 'calls list_conflict_files with `skip_content: true`' do
- expect_any_instance_of(described_class).to receive(:list_conflict_files)
- .with(skip_content: true).and_return(["let's pretend i'm a conflicted file"])
+ it 'calls list_conflict_files with no parameters' do
+ expect_any_instance_of(described_class).to receive(:list_conflict_files)
+ .and_return(["let's pretend i'm a conflicted file"])
- conflicts
- end
- end
-
- context "with the `skip_conflict_files_in_gitaly` feature flag off" do
- before do
- stub_feature_flags(skip_conflict_files_in_gitaly: false)
- end
-
- it 'calls list_conflict_files with no parameters' do
- expect_any_instance_of(described_class).to receive(:list_conflict_files)
- .with(skip_content: false).and_return(["let's pretend i'm a conflicted file"])
-
- conflicts
- end
+ conflicts
end
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 727bf494ee6..26b96ecf36b 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -355,6 +355,40 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
client.create_repository('feature/新機能')
end
+
+ context 'when object format is provided' do
+ before do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:create_repository)
+ .with(gitaly_request_with_path(storage_name, relative_path)
+ .and(gitaly_request_with_params(default_branch: '', object_format: expected_format)), kind_of(Hash))
+ .and_return(double)
+ end
+
+ context 'with SHA1 format' do
+ let(:expected_format) { :OBJECT_FORMAT_SHA1 }
+
+ it 'sends a create_repository message with object format' do
+ client.create_repository(object_format: Repository::FORMAT_SHA1)
+ end
+ end
+
+ context 'with SHA256 format' do
+ let(:expected_format) { :OBJECT_FORMAT_SHA256 }
+
+ it 'sends a create_repository message with object format' do
+ client.create_repository(object_format: Repository::FORMAT_SHA256)
+ end
+ end
+
+ context 'with unknown format' do
+ let(:expected_format) { :OBJECT_FORMAT_UNSPECIFIED }
+
+ it 'sends a create_repository message with object format' do
+ client.create_repository(object_format: 'unknown')
+ end
+ end
+ end
end
describe '#raw_changes_between' do
@@ -479,6 +513,16 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital
end
end
+ describe '#object_format' do
+ it 'sends a object_format message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:object_format)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+
+ client.object_format
+ end
+ end
+
describe '#get_file_attributes' do
let(:rev) { 'master' }
let(:paths) { ['file.txt'] }
diff --git a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
index 7252f7d6afb..6ea9dfde09d 100644
--- a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
@@ -4,11 +4,11 @@ require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::StorageSettings, feature_category: :gitaly do
describe "#initialize" do
- context 'when the storage contains no path' do
+ context 'when the storage contains no gitaly_address' do
it 'raises an error' do
expect do
described_class.new("foo" => {})
- end.to raise_error(described_class::InvalidConfigurationError)
+ end.to raise_error(described_class::InvalidConfigurationError, described_class::INVALID_STORAGE_MESSAGE)
end
end
@@ -23,21 +23,13 @@ RSpec.describe Gitlab::GitalyClient::StorageSettings, feature_category: :gitaly
context 'when the storage is valid' do
it 'raises no error' do
expect do
- described_class.new("path" => Rails.root)
+ described_class.new("gitaly_address" => "unix:tmp/tests/gitaly/gitaly.socket")
end.not_to raise_error
end
end
end
describe '.gitaly_address' do
- context 'when the storage settings have no gitaly address but one is requested' do
- it 'raises an error' do
- expect do
- described_class.new("path" => Rails.root).gitaly_address
- end.to raise_error("key not found: \"gitaly_address\"")
- end
- end
-
context 'when the storage settings have a gitaly address and one is requested' do
it 'returns the setting value' do
expect(described_class.new("path" => Rails.root, "gitaly_address" => "test").gitaly_address).to eq("test")
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 00639d9574b..796fe75521a 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -40,16 +40,6 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
end
end
- describe '.filesystem_id_from_disk' do
- it 'catches errors' do
- [Errno::ENOENT, Errno::EACCES, JSON::ParserError].each do |error|
- stub_file_read(described_class.storage_metadata_file_path('default'), error: error)
-
- expect(described_class.filesystem_id_from_disk('default')).to be_nil
- end
- end
- end
-
describe '.filesystem_id' do
it 'returns an empty string when the relevant storage status is not found in the response' do
response = double("response")
@@ -361,19 +351,6 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
end
end
- describe '.can_use_disk?' do
- it 'properly caches a false result' do
- # spec_helper stubs this globally
- allow(described_class).to receive(:can_use_disk?).and_call_original
- expect(described_class).to receive(:filesystem_id).once
- expect(described_class).to receive(:filesystem_id_from_disk).once
-
- 2.times do
- described_class.can_use_disk?('unknown')
- end
- end
- end
-
describe '.connection_data' do
it 'returns connection data' do
address = 'tcp://localhost:9876'
@@ -919,4 +896,20 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
it_behaves_like 'with_feature_flag_actor'
end
end
+
+ describe '.execute' do
+ subject(:execute) do
+ described_class.execute('default', :ref_service, :find_local_branches, Gitaly::FindLocalBranchesRequest.new,
+ remote_storage: nil, timeout: 10.seconds)
+ end
+
+ it 'raises an exception when running within a concurrent Ruby thread' do
+ Thread.current[:restrict_within_concurrent_ruby] = true
+
+ expect { execute }.to raise_error(Gitlab::Utils::ConcurrentRubyThreadIsUsedError,
+ "Cannot run 'gitaly' if running from `Concurrent::Promise`.")
+
+ Thread.current[:restrict_within_concurrent_ruby] = nil
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/client_pool_spec.rb b/spec/lib/gitlab/github_import/client_pool_spec.rb
deleted file mode 100644
index aabb47c2cf1..00000000000
--- a/spec/lib/gitlab/github_import/client_pool_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::ClientPool, feature_category: :importers do
- subject(:pool) { described_class.new(token_pool: %w[foo bar], per_page: 1, parallel: true) }
-
- describe '#best_client' do
- it 'returns the client with the most remaining requests' do
- allow(Gitlab::GithubImport::Client).to receive(:new).and_return(
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: true, remaining_requests: 10, rate_limit_resets_in: 1
- ),
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: true, remaining_requests: 20, rate_limit_resets_in: 2
- )
- )
-
- expect(pool.best_client.remaining_requests).to eq(20)
- end
-
- context 'when all clients are rate limited' do
- it 'returns the client with the closest rate limit reset time' do
- allow(Gitlab::GithubImport::Client).to receive(:new).and_return(
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: false, remaining_requests: 10, rate_limit_resets_in: 10
- ),
- instance_double(
- Gitlab::GithubImport::Client,
- requests_remaining?: false, remaining_requests: 20, rate_limit_resets_in: 20
- )
- )
-
- expect(pool.best_client.rate_limit_resets_in).to eq(10)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
index 6f602531d23..c1e9bed5681 100644
--- a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::GithubImport::Importer::CollaboratorsImporter, feature_ca
subject(:importer) { described_class.new(project, client, parallel: parallel) }
let(:parallel) { true }
- let(:project) { instance_double(Project, id: 4, import_source: 'foo/bar', import_state: nil) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar', import_state: nil) }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:github_collaborator) do
@@ -74,6 +74,7 @@ RSpec.describe Gitlab::GithubImport::Importer::CollaboratorsImporter, feature_ca
describe '#parallel_import', :clean_gitlab_redis_cache do
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(client).to receive(:collaborators).with(project.import_source, affiliation: 'direct')
.and_return([github_collaborator])
allow(client).to receive(:collaborators).with(project.import_source, affiliation: 'outside')
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 7668451ad4e..bcd38e1e236 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let(:client) { double(:client) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:discussion_id) { 'b0fa404393eeebb4e82becb8104f238812bb1fe6' }
let(:created_at) { Time.new(2017, 1, 1, 12, 00).utc }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15).utc }
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 4e8066ecb69..1eb146ea958 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter, feature_category: :importers do
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
- let(:client) { double(:client) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:github_comment) do
{
@@ -90,6 +90,10 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter, feature_catego
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each diff note in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
index bc14b81bd91..371e76efb75 100644
--- a/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedMilestone do
'actor' => { 'id' => user.id, 'login' => user.username },
'event' => event_type,
'commit_id' => nil,
- 'milestone_title' => milestone.title,
+ 'milestone_title' => milestone_title,
'issue_db_id' => issuable.id,
'created_at' => '2022-04-26 18:30:53 UTC',
'issue' => { 'number' => issuable.iid, pull_request: issuable.is_a?(MergeRequest) }
@@ -35,11 +35,23 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedMilestone do
end
shared_examples 'new event' do
- it 'creates a new milestone event' do
- expect { importer.execute(issue_event) }.to change { issuable.resource_milestone_events.count }
- .from(0).to(1)
- expect(issuable.resource_milestone_events.last)
- .to have_attributes(expected_event_attrs)
+ context 'when a matching milestone exists in GitLab' do
+ let(:milestone_title) { milestone.title }
+
+ it 'creates a new milestone event' do
+ expect { importer.execute(issue_event) }.to change { issuable.resource_milestone_events.count }
+ .from(0).to(1)
+ expect(issuable.resource_milestone_events.last)
+ .to have_attributes(expected_event_attrs)
+ end
+ end
+
+ context 'when a matching milestone does not exist in GitLab' do
+ let(:milestone_title) { 'A deleted milestone title' }
+
+ it 'does not create a new milestone event without a milestone' do
+ expect { importer.execute(issue_event) }.not_to change { issuable.resource_milestone_events.count }
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/events/merged_spec.rb b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
new file mode 100644
index 00000000000..4ea62557dd6
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category: :importers do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:commit_id) { nil }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
+ 'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
+ 'actor' => { 'id' => user.id, 'login' => user.username },
+ 'event' => 'merged',
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'commit_id' => commit_id,
+ 'issue' => { 'number' => merge_request.iid, pull_request: true }
+ )
+ end
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(merge_request.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ end
+
+ it 'creates expected event and state event' do
+ importer.execute(issue_event)
+
+ expect(merge_request.events.count).to eq 1
+ expect(merge_request.events.first).to have_attributes(
+ project_id: project.id,
+ author_id: user.id,
+ target_id: merge_request.id,
+ target_type: merge_request.class.name,
+ action: 'merged',
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ )
+
+ expect(merge_request.resource_state_events.count).to eq 1
+ expect(merge_request.resource_state_events.first).to have_attributes(
+ user_id: user.id,
+ merge_request_id: merge_request.id,
+ state: 'merged',
+ created_at: issue_event.created_at,
+ close_after_error_tracking_resolve: false,
+ close_auto_resolve_prometheus_alert: false
+ )
+ end
+
+ context 'when commit ID is present' do
+ let!(:commit) { create(:commit, project: project) }
+ let(:commit_id) { commit.id }
+
+ it 'creates expected event and state event' do
+ importer.execute(issue_event)
+
+ expect(merge_request.events.count).to eq 1
+ state_event = merge_request.resource_state_events.last
+ expect(state_event.source_commit).to eq commit_id[0..40]
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
index 91121f3c3fc..2389489e867 100644
--- a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
@@ -2,20 +2,19 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab_redis_cache do
+RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab_redis_cache,
+ feature_category: :importers do
let(:importer) { described_class.new(issue_event, project, client) }
- let(:project) { create(:project) }
- let(:client) { instance_double('Gitlab::GithubImport::Client') }
- let(:user) { create(:user) }
- let(:issue) { create(:issue, project: project) }
+ let(:project) { build(:project) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
'id' => 6501124486,
'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
- 'actor' => { 'id' => actor_id, 'login' => 'alice' },
+ 'actor' => { 'id' => 1, 'login' => 'alice' },
'event' => event_name,
'commit_id' => '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
'commit_url' =>
@@ -25,17 +24,13 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
)
end
- let(:actor_id) { user.id }
let(:event_name) { 'closed' }
shared_examples 'triggers specific event importer' do |importer_class|
it importer_class.name do
- specific_importer = double(importer_class.name) # rubocop:disable RSpec/VerifiedDoubles
-
- expect(importer_class)
- .to receive(:new).with(project, client)
- .and_return(specific_importer)
- expect(specific_importer).to receive(:execute).with(issue_event)
+ expect_next_instance_of(importer_class, project, client) do |importer|
+ expect(importer).to receive(:execute).with(issue_event)
+ end
importer.execute
end
@@ -45,85 +40,79 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
context "when it's closed issue event" do
let(:event_name) { 'closed' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::Closed
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Closed
end
context "when it's reopened issue event" do
let(:event_name) { 'reopened' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::Reopened
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Reopened
end
context "when it's labeled issue event" do
let(:event_name) { 'labeled' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedLabel
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedLabel
end
context "when it's unlabeled issue event" do
let(:event_name) { 'unlabeled' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedLabel
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedLabel
end
context "when it's renamed issue event" do
let(:event_name) { 'renamed' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::Renamed
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Renamed
end
context "when it's milestoned issue event" do
let(:event_name) { 'milestoned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedMilestone
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedMilestone
end
context "when it's demilestoned issue event" do
let(:event_name) { 'demilestoned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedMilestone
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedMilestone
end
context "when it's cross-referenced issue event" do
let(:event_name) { 'cross-referenced' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::CrossReferenced
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::CrossReferenced
end
context "when it's assigned issue event" do
let(:event_name) { 'assigned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedAssignee
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedAssignee
end
context "when it's unassigned issue event" do
let(:event_name) { 'unassigned' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedAssignee
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedAssignee
end
context "when it's review_requested issue event" do
let(:event_name) { 'review_requested' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedReviewer
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedReviewer
end
context "when it's review_request_removed issue event" do
let(:event_name) { 'review_request_removed' }
- it_behaves_like 'triggers specific event importer',
- Gitlab::GithubImport::Importer::Events::ChangedReviewer
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::ChangedReviewer
+ end
+
+ context "when it's merged issue event" do
+ let(:event_name) { 'merged' }
+
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Merged
end
context "when it's unknown issue event" do
diff --git a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
index 9aba6a2b02c..f7ee6fee6dc 100644
--- a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client, parallel: parallel) }
- let(:project) { instance_double(Project, id: 4, import_source: 'foo/bar') }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:parallel) { true }
@@ -74,6 +74,10 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_cate
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each note in parallel' do
allow(importer).to receive(:each_object_to_import).and_yield(issue_event)
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
index 1bfdce04187..9451d1dfc37 100644
--- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter, feature_category: :importers do
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
- let(:client) { double(:client) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
@@ -83,6 +83,10 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter, feature_category:
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each issue in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 3f5ee68d264..a5ec39b4177 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -111,6 +111,10 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter, feature_categ
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each lfs object in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
index 8c93963f325..92d3071c826 100644
--- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::NotesImporter, feature_category: :importers do
- let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
- let(:client) { double(:client) }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:github_comment) do
{
@@ -76,6 +76,10 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter, feature_category:
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each note in parallel' do
importer = described_class.new(project, client)
diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
index 8e99585109b..b0892767fb3 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, feature_category: :importers do
subject(:importer) { described_class.new(project, client, parallel: parallel) }
- let(:project) { instance_double('Project', id: 4, import_source: 'foo/bar') }
- let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:project) { build(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:parallel) { true }
let(:branches) do
@@ -112,7 +112,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
end
it 'imports each protected branch in sequence' do
- protected_branch_importer = instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter')
+ protected_branch_importer = instance_double(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
expect(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
.to receive(:new)
@@ -133,6 +133,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
describe '#parallel_import', :clean_gitlab_redis_cache do
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(client).to receive(:branches).and_return(branches)
allow(client)
.to receive(:branch_protection)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
index 4321997815a..f5779f300b8 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
@@ -46,6 +46,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewsImporter, fe
let(:review) { { id: 1 } }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches the pull requests reviews data' do
page = Struct.new(:objects, :number).new([review], 1)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index 10e413fdfe5..1a0adbbe3a3 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -93,6 +93,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
end
describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'imports each note in parallel' do
importer = described_class.new(project, client)
@@ -112,9 +116,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
end
describe '#each_object_to_import', :clean_gitlab_redis_cache do
- let(:importer) { described_class.new(project, client) }
-
before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
page = double(:page, objects: [pull_request], number: 1)
expect(client)
@@ -127,6 +130,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
.and_yield(page)
end
+ let(:importer) { described_class.new(project, client) }
+
it 'yields every pull request to the supplied block' do
expect { |b| importer.each_object_to_import(&b) }
.to yield_with_args(pull_request)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
index 081d08edfb3..6fe0494d7cd 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
@@ -29,6 +29,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter d
let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches data' do
expect(client)
.to receive(:each_page)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
index dde730d46d2..91f89f0779c 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter do
+RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter, feature_category: :importers do
let(:client) { double }
let_it_be(:project) { create(:project, :import_started, import_source: 'http://somegithub.com') }
@@ -101,14 +101,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
before do
- allow(client).to receive(:each_page)
- .once
- .with(
- :issue_timeline,
- project.import_source,
- issuable.iid,
- { state: 'all', sort: 'created', direction: 'asc', page: 1 }
- ).and_yield(page)
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ allow(client).to receive(:each_page).once.with(:issue_timeline,
+ project.import_source, issuable.iid, { state: 'all', sort: 'created', direction: 'asc', page: 1 }
+ ).and_yield(page)
end
context 'with issues' do
@@ -192,5 +188,18 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
expect(counter).to eq 0
end
end
+
+ context 'when event is not supported' do
+ let(:issue_event) do
+ struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
+ struct.new(id: 1, event: 'not_supported_event', created_at: '2022-04-26 18:30:53 UTC')
+ end
+
+ it "doesn't process this event" do
+ counter = 0
+ subject.each_object_to_import { counter += 1 }
+ expect(counter).to eq 0
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
index e1f65546e1d..88613244c8b 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
@@ -28,6 +28,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches data' do
expect(client)
.to receive(:each_page)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
index 5523b97acc3..601cd7a8f15 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
@@ -29,6 +29,10 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesIm
let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'fetches data' do
expect(client)
.to receive(:each_page)
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
index 977fef95d64..3fe07923a50 100644
--- a/spec/lib/gitlab/github_import/issuable_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -48,34 +48,6 @@ RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache,
expect { finder.database_id }.to raise_error(TypeError)
end
- context 'with FF import_fallback_to_db_empty_cache disabled' do
- before do
- stub_feature_flags(import_fallback_to_db_empty_cache: false)
- end
-
- it 'returns nil if object does not exist' do
- missing_issue = double(:issue, issuable_type: 'MergeRequest', issuable_id: 999)
-
- expect(described_class.new(project, missing_issue).database_id).to be_nil
- end
-
- it 'does not fetch object id from database if not in cache' do
- expect(finder.database_id).to eq(nil)
- end
-
- it 'fetches object id from cache if present' do
- finder.cache_database_id(10)
-
- expect(finder.database_id).to eq(10)
- end
-
- it 'returns -1 if cache is -1' do
- finder.cache_database_id(-1)
-
- expect(finder.database_id).to eq(-1)
- end
- end
-
context 'when group is present' do
context 'when settings single_endpoint_notes_import is enabled' do
let(:single_endpoint_optional_stage) { true }
diff --git a/spec/lib/gitlab/github_import/job_delay_calculator_spec.rb b/spec/lib/gitlab/github_import/job_delay_calculator_spec.rb
new file mode 100644
index 00000000000..3ddf8136dcf
--- /dev/null
+++ b/spec/lib/gitlab/github_import/job_delay_calculator_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::JobDelayCalculator, feature_category: :importers do
+ let(:project) { build(:project) }
+
+ let(:importer_class) do
+ Class.new do
+ attr_reader :project
+
+ def initialize(project)
+ @project = project
+ end
+
+ include Gitlab::GithubImport::JobDelayCalculator
+ end
+ end
+
+ describe "#parallel_import_batch" do
+ subject { importer_class.new(project).parallel_import_batch }
+
+ it { is_expected.to eq({ size: 5000, delay: 1.minute }) }
+
+ context 'when `github_import_increased_concurrent_workers` feature flag is disabled' do
+ before do
+ stub_feature_flags(github_import_increased_concurrent_workers: false)
+ end
+
+ it { is_expected.to eq({ size: 1000, delay: 1.minute }) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/label_finder_spec.rb b/spec/lib/gitlab/github_import/label_finder_spec.rb
index e46595974d1..4c01e2b65da 100644
--- a/spec/lib/gitlab/github_import/label_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/label_finder_spec.rb
@@ -49,34 +49,6 @@ RSpec.describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache, fea
expect(finder.id_for(feature.name)).to eq(feature.id)
end
end
-
- context 'with FF import_fallback_to_db_empty_cache disabled' do
- before do
- stub_feature_flags(import_fallback_to_db_empty_cache: false)
- end
-
- it 'returns nil for a non existing label name' do
- expect(finder.id_for('kittens')).to be_nil
- end
-
- it 'does not fetch object id from database if not in cache' do
- expect(finder.id_for(feature.name)).to be_nil
- end
-
- it 'fetches object id from cache if present' do
- finder.build_cache
-
- expect(finder.id_for(feature.name)).to eq(feature.id)
- end
-
- it 'returns -1 if cache is -1' do
- key = finder.cache_key_for(bug.name)
-
- Gitlab::Cache::Import::Caching.write(key, -1)
-
- expect(finder.id_for(bug.name)).to eq(-1)
- end
- end
end
describe '#build_cache' do
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
index 62886981de1..91f1c3b8cb9 100644
--- a/spec/lib/gitlab/github_import/milestone_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -57,36 +57,6 @@ RSpec.describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache,
expect(finder.id_for(issuable)).to eq(milestone.id)
end
end
-
- context 'with FF import_fallback_to_db_empty_cache disabled' do
- before do
- stub_feature_flags(import_fallback_to_db_empty_cache: false)
- end
-
- it 'returns nil if object does not exist' do
- missing_issuable = double(:issuable, milestone_number: 999)
-
- expect(finder.id_for(missing_issuable)).to be_nil
- end
-
- it 'does not fetch object id from database if not in cache' do
- expect(finder.id_for(issuable)).to be_nil
- end
-
- it 'fetches object id from cache if present' do
- finder.build_cache
-
- expect(finder.id_for(issuable)).to eq(milestone.id)
- end
-
- it 'returns -1 if cache is -1' do
- key = finder.cache_key_for(milestone.iid)
-
- Gitlab::Cache::Import::Caching.write(key, -1)
-
- expect(finder.id_for(issuable)).to eq(-1)
- end
- end
end
describe '#build_cache' do
diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb
index 964bdd6aad1..aa551195a35 100644
--- a/spec/lib/gitlab/github_import/object_counter_spec.rb
+++ b/spec/lib/gitlab/github_import/object_counter_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache, feature_category: :importers do
let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://github.com/vim/vim.git') }
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ end
+
it 'validates the operation being incremented' do
expect { described_class.increment(project, :issue, :unknown) }
.to raise_error(ArgumentError, 'operation must be fetched or imported')
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index e0b1ff1bc33..3188206de5b 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -275,17 +275,10 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :impo
let(:batch_delay) { 1.minute }
before do
- allow(importer)
- .to receive(:representation_class)
- .and_return(repr_class)
-
- allow(importer)
- .to receive(:sidekiq_worker_class)
- .and_return(worker_class)
-
- allow(repr_class)
- .to receive(:from_api_response)
- .with(object, {})
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
+ allow(importer).to receive(:representation_class).and_return(repr_class)
+ allow(importer).to receive(:sidekiq_worker_class).and_return(worker_class)
+ allow(repr_class).to receive(:from_api_response).with(object, {})
.and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' })
end
diff --git a/spec/lib/gitlab/github_import/representation/representable_spec.rb b/spec/lib/gitlab/github_import/representation/representable_spec.rb
new file mode 100644
index 00000000000..4bc495c40f5
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/representable_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::Representable, feature_category: :importers do
+ let(:representation_class) do
+ subject_module = described_class
+
+ Class.new do
+ include subject_module
+ end
+ end
+
+ let(:representable) { representation_class.new }
+
+ describe '#github_identifiers' do
+ subject(:github_identifiers) { representable.github_identifiers }
+
+ context 'when class does not define `#github_identifiers`' do
+ it 'tracks the error' do
+ error = NotImplementedError.new('Subclasses must implement #github_identifiers')
+
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ is_expected.to eq({})
+ end
+ end
+
+ context 'when class defines `#github_identifiers`' do
+ let(:representation_class) do
+ Class.new(super()) do
+ def github_identifiers
+ { id: 1 }
+ end
+ end
+ end
+
+ it 'does not track an exception and returns the identifiers' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ is_expected.to eq({ id: 1 })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/settings_spec.rb b/spec/lib/gitlab/github_import/settings_spec.rb
index de497bc6689..ea1526ca25f 100644
--- a/spec/lib/gitlab/github_import/settings_spec.rb
+++ b/spec/lib/gitlab/github_import/settings_spec.rb
@@ -62,12 +62,11 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
collaborators_import: false,
foo: :bar
},
- timeout_strategy: "optimistic",
- additional_access_tokens: %w[foo bar]
+ timeout_strategy: "optimistic"
}.stringify_keys
end
- it 'puts optional steps, timeout strategy & access tokens into projects import_data' do
+ it 'puts optional steps and timeout strategy into projects import_data' do
project.build_or_assign_import_data(credentials: { user: 'token' })
settings.write(data_input)
@@ -76,8 +75,6 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
.to eq optional_stages.stringify_keys
expect(project.import_data.data['timeout_strategy'])
.to eq("optimistic")
- expect(project.import_data.credentials.fetch(:additional_access_tokens))
- .to eq(data_input['additional_access_tokens'])
end
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 8453f002bc0..1721f470b33 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1, group: nil) }
it 'returns a new Client with a custom token' do
- allow(project).to receive(:import_data)
-
expect(described_class::Client)
.to receive(:new)
.with('123', host: nil, parallel: true, per_page: 100)
@@ -26,7 +24,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
expect(project)
.to receive(:import_data)
.and_return(import_data)
- .twice
expect(described_class::Client)
.to receive(:new)
@@ -49,31 +46,12 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
described_class.ghost_user_id
end
end
-
- context 'when there are additional access tokens' do
- it 'returns a new ClientPool containing all tokens' do
- import_data = double(:import_data, credentials: { user: '123', additional_access_tokens: %w[foo bar] })
-
- expect(project)
- .to receive(:import_data)
- .and_return(import_data)
- .twice
-
- expect(described_class::ClientPool)
- .to receive(:new)
- .with(token_pool: %w[foo bar 123], host: nil, parallel: true, per_page: 100)
-
- described_class.new_client_for(project)
- end
- end
end
context 'GitHub Enterprise' do
let(:project) { double(:project, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git', group: nil) }
it 'returns a new Client with a custom token' do
- allow(project).to receive(:import_data)
-
expect(described_class::Client)
.to receive(:new)
.with('123', host: 'http://github.another-domain.com/api/v3', parallel: true, per_page: 100)
@@ -87,7 +65,6 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do
expect(project)
.to receive(:import_data)
.and_return(import_data)
- .twice
expect(described_class::Client)
.to receive(:new)
diff --git a/spec/lib/gitlab/hook_data/project_builder_spec.rb b/spec/lib/gitlab/hook_data/project_builder_spec.rb
index f80faac563d..9d5eaf0608c 100644
--- a/spec/lib/gitlab/hook_data/project_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/project_builder_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::HookData::ProjectBuilder do
let_it_be(:user) { create(:user, name: 'John', email: 'john@example.com') }
- let_it_be(:namespace) { create(:namespace, owner: user) }
- let_it_be(:project) { create(:project, :internal, name: 'my_project', namespace: namespace) }
+ let_it_be(:user2) { create(:user, name: 'Peter') }
+ let_it_be(:user3_non_owner) { create(:user, name: 'Not_Owner') }
describe '#build' do
let(:data) { described_class.new(project).build(event) }
@@ -24,13 +24,13 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
expect(data[:created_at]).to eq(project.created_at.xmlschema)
expect(data[:updated_at]).to eq(project.updated_at.xmlschema)
- expect(data[:name]).to eq('my_project')
+ expect(data[:name]).to eq(project.name)
expect(data[:path]).to eq(project.path)
expect(data[:path_with_namespace]).to eq(project.full_path)
expect(data[:project_id]).to eq(project.id)
- expect(data[:owner_name]).to eq('John')
- expect(data[:owner_email]).to eq(_('[REDACTED]'))
- expect(data[:owners]).to contain_exactly({ name: 'John', email: _('[REDACTED]') })
+ expect(data[:owner_name]).to eq(owner_name)
+ expect(data[:owner_email]).to eq(owner_email)
+ expect(data[:owners]).to match_array(owners_data)
expect(data[:project_visibility]).to eq('internal')
end
end
@@ -48,40 +48,104 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
end
end
- context 'on create' do
- let(:event) { :create }
+ context 'the project is created in a personal namespace' do
+ let(:owner_name) { user.name }
+ let(:owner_email) { _('[REDACTED]') }
+ let(:owners_data) { [{ name: 'John', email: _('[REDACTED]') }, { name: 'Peter', email: _('[REDACTED]') }] }
+ let_it_be(:namespace) { create(:namespace, owner: user) }
+ let_it_be(:project) { create(:project, :internal, name: 'personal project', namespace: namespace) }
- it { expect(event_name).to eq('project_create') }
+ before_all do
+ project.add_owner(user2)
+ project.add_maintainer(user3_non_owner)
+ end
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'does not include `old_path_with_namespace` attribute'
- end
+ context 'on create' do
+ let(:event) { :create }
- context 'on destroy' do
- let(:event) { :destroy }
+ it { expect(event_name).to eq('project_create') }
- it { expect(event_name).to eq('project_destroy') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'does not include `old_path_with_namespace` attribute'
- end
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('project_destroy') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
- context 'on rename' do
- let(:event) { :rename }
+ context 'on rename' do
+ let(:event) { :rename }
- it { expect(event_name).to eq('project_rename') }
+ it { expect(event_name).to eq('project_rename') }
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'includes `old_path_with_namespace` attribute'
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+
+ context 'on transfer' do
+ let(:event) { :transfer }
+
+ it { expect(event_name).to eq('project_transfer') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
end
- context 'on transfer' do
- let(:event) { :transfer }
+ context 'the project is created in a group' do
+ let(:owner_name) { group.name }
+ let(:owner_email) { "" }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :internal, name: 'group project', namespace: group) }
+ let(:owners_data) { [{ name: 'John', email: _('[REDACTED]') }, { email: "[REDACTED]", name: "Peter" }] }
+
+ before_all do
+ group.add_owner(user)
+ group.add_owner(user2)
+ group.add_maintainer(user3_non_owner)
+ end
+
+ # Repeat the tests in the previous context
+ context 'on create' do
+ let(:event) { :create }
- it { expect(event_name).to eq('project_transfer') }
+ it { expect(event_name).to eq('project_create') }
- it_behaves_like 'includes the required attributes'
- it_behaves_like 'includes `old_path_with_namespace` attribute'
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('project_destroy') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on rename' do
+ let(:event) { :rename }
+
+ it { expect(event_name).to eq('project_rename') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+
+ context 'on transfer' do
+ let(:event) { :transfer }
+
+ it { expect(event_name).to eq('project_transfer') }
+
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
end
end
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index a9e0c6a3b92..3fc486a8984 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -32,6 +32,45 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
described_class.get('/path', allow_object_storage: true)
end
end
+
+ context 'when passing async:true' do
+ it 'calls Gitlab::HTTP_V2.get with default options and async:true' do
+ expect(Gitlab::HTTP_V2).to receive(:get)
+ .with('/path', default_options.merge(async: true))
+
+ described_class.get('/path', async: true)
+ end
+
+ it 'returns a Gitlab::HTTP_V2::LazyResponse object' do
+ stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
+ result = described_class.get('http://example.org', async: true)
+
+ expect(result).to be_a(Gitlab::HTTP_V2::LazyResponse)
+
+ result.execute
+ result.wait
+
+ expect(result.value).to be_a(HTTParty::Response)
+ expect(result.value.body).to eq('hello world')
+ end
+
+ context 'when there is a DB call in the concurrent thread' do
+ it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/432145' do
+ stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
+
+ result = described_class.get('http://example.org', async: true) do |_fragment|
+ User.first
+ end
+
+ result.execute
+ result.wait
+
+ expect { result.value }.to raise_error(Gitlab::Utils::ConcurrentRubyThreadIsUsedError,
+ "Cannot run 'db' if running from `Concurrent::Promise`.")
+ end
+ end
+ end
end
describe '.try_get' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 722b47ac9b8..688487df778 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -76,6 +76,8 @@ work_item_type:
- work_items
- widget_definitions
- enabled_widget_definitions
+- child_restrictions
+- allowed_child_types_by_name
events:
- author
- project
@@ -420,6 +422,7 @@ builds:
- job_artifacts_cluster_image_scanning
- job_artifacts_cyclonedx
- job_artifacts_requirements_v2
+- job_artifacts_repository_xray
- runner_manager
- runner_manager_build
- runner_session
@@ -521,6 +524,7 @@ container_repositories:
- name
project:
- catalog_resource
+- catalog_resource_sync_events
- catalog_resource_versions
- ci_components
- external_status_checks
@@ -570,7 +574,6 @@ project:
- google_play_integration
- pipelines_email_integration
- mattermost_slash_commands_integration
-- shimo_integration
- slack_slash_commands_integration
- harbor_integration
- irker_integration
@@ -831,6 +834,7 @@ project:
- target_branch_rules
- organization
- dora_performance_scores
+- xray_reports
award_emoji:
- awardable
- user
@@ -1053,6 +1057,7 @@ catalog_resource:
- project
- catalog_resource_components
- catalog_resource_versions
+ - catalog_resource_sync_events
catalog_resource_versions:
- project
- release
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index b6328994c5b..3efa33d8879 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -706,6 +706,7 @@ ProjectFeature:
- monitor_access_level
- infrastructure_access_level
- model_experiments_access_level
+- model_registry_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -820,7 +821,6 @@ ProjectSetting:
- allow_merge_on_skipped_pipeline
- only_allow_merge_if_all_status_checks_passed
- has_confluence
-- has_shimo
- has_vulnerabilities
ProtectedEnvironment:
- id
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index 19f17c9079d..48cdeee3d2f 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -74,38 +74,6 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do
end
end
- describe '.import_table' do
- subject { described_class.import_table }
-
- describe 'Bitbucket cloud' do
- it 'returns the ParallelImporter' do
- is_expected.to include(
- described_class::ImportSource.new(
- 'bitbucket',
- 'Bitbucket Cloud',
- Gitlab::BitbucketImport::ParallelImporter
- )
- )
- end
-
- context 'when flag is disabled' do
- before do
- stub_feature_flags(bitbucket_parallel_importer: false)
- end
-
- it 'returns the legacy Importer' do
- is_expected.to include(
- described_class::ImportSource.new(
- 'bitbucket',
- 'Bitbucket Cloud',
- Gitlab::BitbucketImport::Importer
- )
- )
- end
- end
- end
- end
-
describe '.title' do
import_sources = {
'github' => 'GitHub',
diff --git a/spec/lib/gitlab/instrumentation/connection_pool_spec.rb b/spec/lib/gitlab/instrumentation/connection_pool_spec.rb
new file mode 100644
index 00000000000..b7cab2e9900
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/connection_pool_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'support/helpers/rails_helpers'
+
+RSpec.describe Gitlab::Instrumentation::ConnectionPool, feature_category: :redis do
+ let(:option) { { name: 'test', size: 5 } }
+ let(:pool) { ConnectionPool.new(option) { 'nothing' } }
+
+ let_it_be(:size_gauge_args) { [:gitlab_connection_pool_size, 'Size of connection pool', {}, :all] }
+ let_it_be(:available_gauge_args) do
+ [:gitlab_connection_pool_available_count,
+ 'Number of available connections in the pool', {}, :all]
+ end
+
+ subject(:checkout_pool) { pool.checkout }
+
+ describe '.checkout' do
+ let(:size_gauge_double) { instance_double(::Prometheus::Client::Gauge) }
+
+ context 'when tracking for the first time' do
+ it 'initialises gauges' do
+ expect(::Gitlab::Metrics).to receive(:gauge).with(*size_gauge_args).and_call_original
+ expect(::Gitlab::Metrics).to receive(:gauge).with(*available_gauge_args).and_call_original
+
+ checkout_pool
+ end
+ end
+
+ it 'sets the size gauge only once' do
+ expect(::Gitlab::Metrics.gauge(*size_gauge_args)).to receive(:set).with(
+ { pool_name: 'test', pool_key: anything, connection_class: "String" }, 5).once
+
+ checkout_pool
+ checkout_pool
+ end
+
+ context 'when tracking on subsequent calls' do
+ before do
+ pool.checkout # initialise instance variables
+ end
+
+ it 'uses memoized gauges' do
+ expect(::Gitlab::Metrics).not_to receive(:gauge).with(*size_gauge_args)
+ expect(::Gitlab::Metrics).not_to receive(:gauge).with(*available_gauge_args)
+
+ expect(pool.instance_variable_get(:@size_gauge)).not_to receive(:set)
+ .with({ pool_name: 'test', pool_key: anything, connection_class: "String" }, 5)
+ expect(pool.instance_variable_get(:@available_gauge)).to receive(:set)
+ .with({ pool_name: 'test', pool_key: anything, connection_class: "String" }, 4)
+
+ checkout_pool
+ end
+
+ context 'when pool name is omitted' do
+ let(:option) { {} }
+
+ it 'uses unknown name' do
+ expect(pool.instance_variable_get(:@size_gauge)).not_to receive(:set)
+ .with({ pool_name: 'unknown', pool_key: anything, connection_class: "String" }, 5)
+ expect(pool.instance_variable_get(:@available_gauge)).to receive(:set)
+ .with({ pool_name: 'unknown', pool_key: anything, connection_class: "String" }, 4)
+
+ checkout_pool
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation/redis_base_spec.rb b/spec/lib/gitlab/instrumentation/redis_base_spec.rb
index 426997f6e86..f0854b38353 100644
--- a/spec/lib/gitlab/instrumentation/redis_base_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_base_spec.rb
@@ -222,4 +222,18 @@ RSpec.describe Gitlab::Instrumentation::RedisBase, :request_store do
instrumentation_class_a.log_exception(StandardError.new)
end
end
+
+ describe '.instance_count_connection_exception' do
+ before do
+ # initialise connection_exception_counter
+ instrumentation_class_a.instance_count_connection_exception(StandardError.new)
+ end
+
+ it 'counts connection exception' do
+ expect(instrumentation_class_a.instance_variable_get(:@connection_exception_counter)).to receive(:increment)
+ .with({ storage: instrumentation_class_a.storage_key, exception: 'Redis::ConnectionError' })
+
+ instrumentation_class_a.instance_count_connection_exception(Redis::ConnectionError.new)
+ end
+ end
end
diff --git a/spec/lib/gitlab/instrumentation/redis_helper_spec.rb b/spec/lib/gitlab/instrumentation/redis_helper_spec.rb
new file mode 100644
index 00000000000..54659ca2c02
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_helper_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Instrumentation::RedisHelper, :request_store, feature_category: :scalability do
+ include RedisHelpers
+
+ let(:minimal_test_class) do
+ Class.new do
+ include Gitlab::Instrumentation::RedisHelper
+ def initialize
+ @instrumentation_class = Gitlab::Instrumentation::Redis::Cache
+ end
+
+ def check_command(commands, pipelined)
+ instrument_call(commands, @instrumentation_class, pipelined) { 'empty block' }
+ end
+
+ def test_read(result)
+ measure_read_size(result, @instrumentation_class)
+ end
+
+ def test_write(command)
+ measure_write_size(command, @instrumentation_class)
+ end
+
+ def test_exclusion(commands)
+ exclude_from_apdex?(commands)
+ end
+ end
+ end
+
+ before do
+ stub_const("MinimalTestClass", minimal_test_class)
+ end
+
+ subject(:minimal_test_class_instance) { MinimalTestClass.new }
+
+ describe '.instrument_call' do
+ it 'instruments request count' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_count_request).with(1)
+ expect(Gitlab::Instrumentation::Redis::Cache).not_to receive(:instance_count_pipelined_request)
+
+ minimal_test_class_instance.check_command([[:set, 'foo', 'bar']], false)
+ end
+
+ it 'performs cluster validation' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:redis_cluster_validate!).once
+
+ minimal_test_class_instance.check_command([[:set, 'foo', 'bar']], false)
+ end
+
+ context 'when command is not valid for Redis Cluster' do
+ before do
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:redis_cluster_validate!).and_return(false)
+ end
+
+ it 'reports cross slot request' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_cross_slot_request_count).once
+
+ minimal_test_class_instance.check_command([[:mget, 'foo', 'bar']], false)
+ end
+ end
+
+ context 'when an error is raised' do
+ # specific error behaviours are tested in spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
+ # this spec tests for the generic behaviour to verify that `ensure` works for any general error types
+ before do
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_count_request)
+ .and_raise(StandardError)
+ end
+
+ it 'ensures duration is tracked' do
+ commands = [[:set, 'foo', 'bar']]
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_observe_duration).once
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_request_count).with(1).once
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:add_duration).once
+ allow(Gitlab::Instrumentation::Redis::Cache).to receive(:add_call_details).with(anything, commands).once
+
+ expect { minimal_test_class_instance.check_command(commands, false) }.to raise_error(StandardError)
+ end
+ end
+
+ context 'when pipelined' do
+ it 'instruments pipelined request count' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:instance_count_pipelined_request)
+
+ minimal_test_class_instance.check_command([[:get, '{user1}:bar'], [:get, '{user1}:foo']], true)
+ end
+ end
+ end
+
+ describe '.measure_read_size' do
+ it 'reads array' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_read_bytes).with(3).exactly(3).times
+
+ minimal_test_class_instance.test_read(%w[bar foo buz])
+ end
+
+ it 'reads Integer' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_read_bytes).with(4)
+
+ minimal_test_class_instance.test_read(1234)
+ end
+
+ it 'reads String' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_read_bytes).with(3)
+
+ minimal_test_class_instance.test_read('bar')
+ end
+ end
+
+ describe '.measure_write_size' do
+ it 'measures command size' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_write_bytes).with(9)
+
+ minimal_test_class_instance.test_write([:set, 'foo', 'bar'])
+ end
+
+    it 'accepts array input' do
+ expect(Gitlab::Instrumentation::Redis::Cache).to receive(:increment_write_bytes).with((9 + 12))
+
+ minimal_test_class_instance.test_write([[:set, 'foo', 'bar'], [:lpush, 'que', 'item']])
+ end
+ end
+
+ describe '.exclude_from_apdex?' do
+ it 'returns false if all commands are allowed' do
+ expect(minimal_test_class_instance.test_exclusion([[:set, 'foo', 'bar'], [:lpush, 'que', 'item']])).to eq(false)
+ end
+
+ it 'returns true if any commands are banned' do
+ expect(minimal_test_class_instance.test_exclusion([[:brpop, 'foo', 2], [:lpush, 'que', 'item']])).to eq(true)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index 4168fdf5425..e9bd0056e5f 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -72,6 +72,25 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :request_store, featur
end
end
+ context 'when encountering connection exceptions within process' do
+ before do
+ redis_store_class.with do |redis|
+ allow(redis._client).to receive(:write).and_call_original
+ end
+ end
+
+ it 'counts connection exceptions' do
+ redis_store_class.with do |redis|
+ expect(redis._client).to receive(:write).with([:get, 'foobar']).and_raise(::Redis::ConnectionError)
+ end
+
+ expect(instrumentation_class).to receive(:instance_count_connection_exception)
+ .with(instance_of(Redis::ConnectionError)).and_call_original
+
+ redis_store_class.with { |redis| redis.call(:get, 'foobar') }
+ end
+ end
+
context 'when encountering exceptions' do
where(:case_name, :exception, :exception_counter) do
'generic exception' | Redis::CommandError | :instance_count_exception
diff --git a/spec/lib/gitlab/internal_events_spec.rb b/spec/lib/gitlab/internal_events_spec.rb
index 20625add292..7ac583b24ce 100644
--- a/spec/lib/gitlab/internal_events_spec.rb
+++ b/spec/lib/gitlab/internal_events_spec.rb
@@ -7,27 +7,42 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
include SnowplowHelpers
before do
+ allow(Gitlab::AppJsonLogger).to receive(:warn)
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
allow(redis).to receive(:incr)
allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_snowplow)
- allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_property).and_return(:user)
+ allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_property).and_return(unique_property)
allow(fake_snowplow).to receive(:event)
end
- def expect_redis_hll_tracking(event_name)
+ shared_examples 'an event that logs an error' do
+ it 'logs an error' do
+ described_class.track_event(event_name, **event_kwargs)
+
+ expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
+ .with(described_class::InvalidPropertyTypeError,
+ event_name: event_name,
+ kwargs: event_kwargs
+ )
+ end
+ end
+
+ def expect_redis_hll_tracking
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
.with(event_name, values: unique_value)
end
- def expect_redis_tracking(event_name)
- expect(redis).to have_received(:incr) do |redis_key|
- expect(redis_key).to end_with(event_name)
+ def expect_redis_tracking
+ call_index = 0
+ expect(redis).to have_received(:incr).twice do |redis_key|
+ expect(redis_key).to end_with(redis_arguments[call_index])
+ call_index += 1
end
end
- def expect_snowplow_tracking(event_name)
+ def expect_snowplow_tracking(expected_namespace = nil)
service_ping_context = Gitlab::Tracking::ServicePingContext
.new(data_source: :redis_hll, event: event_name)
.to_context
@@ -36,33 +51,125 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
expect(SnowplowTracker::SelfDescribingJson).to have_received(:new)
.with(service_ping_context[:schema], service_ping_context[:data]).at_least(:once)
- # Add test for creation of both contexts
- contexts = [instance_of(SnowplowTracker::SelfDescribingJson), instance_of(SnowplowTracker::SelfDescribingJson)]
+ expect(fake_snowplow).to have_received(:event) do |category, provided_event_name, args|
+ expect(category).to eq('InternalEventTracking')
+ expect(provided_event_name).to eq(event_name)
+
+ contexts = args[:context]&.map(&:to_json)
+
+ # Verify Standard Context
+ standard_context = contexts.find do |c|
+ c[:schema] == Gitlab::Tracking::StandardContext::GITLAB_STANDARD_SCHEMA_URL
+ end
+
+ validate_standard_context(standard_context, expected_namespace)
+
+ # Verify Service Ping context
+ service_ping_context = contexts.find { |c| c[:schema] == Gitlab::Tracking::ServicePingContext::SCHEMA_URL }
- expect(fake_snowplow).to have_received(:event)
- .with('InternalEventTracking', event_name, context: contexts)
+ validate_service_ping_context(service_ping_context)
+ end
+ end
+
+ def validate_standard_context(standard_context, expected_namespace)
+ namespace = expected_namespace || project&.namespace
+ expect(standard_context).not_to eq(nil)
+ expect(standard_context[:data][:user_id]).to eq(user&.id)
+ expect(standard_context[:data][:namespace_id]).to eq(namespace&.id)
+ expect(standard_context[:data][:project_id]).to eq(project&.id)
+ end
+
+ def validate_service_ping_context(service_ping_context)
+ expect(service_ping_context).not_to eq(nil)
+ expect(service_ping_context[:data][:data_source]).to eq(:redis_hll)
+ expect(service_ping_context[:data][:event_name]).to eq(event_name)
end
let_it_be(:user) { build(:user, id: 1) }
- let_it_be(:project) { build(:project, id: 2) }
- let_it_be(:namespace) { project.namespace }
+ let_it_be(:project_namespace) { build(:namespace, id: 2) }
+ let_it_be(:project) { build(:project, id: 3, namespace: project_namespace) }
let(:redis) { instance_double('Redis') }
let(:fake_snowplow) { instance_double(Gitlab::Tracking::Destinations::Snowplow) }
let(:event_name) { 'g_edit_by_web_ide' }
+ let(:unique_property) { :user }
let(:unique_value) { user.id }
+ let(:redis_arguments) { [event_name, Date.today.strftime('%G-%V')] }
+
+ context 'when only user is passed' do
+ let(:project) { nil }
+ let(:namespace) { nil }
+
+ it 'updated all tracking methods' do
+ described_class.track_event(event_name, user: user)
+
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking
+ end
+ end
+
+ context 'when namespace is passed' do
+ let(:namespace) { build(:namespace, id: 4) }
+
+ it 'uses id from namespace' do
+ described_class.track_event(event_name, user: user, project: project, namespace: namespace)
+
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(namespace)
+ end
+ end
+
+ context 'when namespace is not passed' do
+ let(:unique_property) { :namespace }
+ let(:unique_value) { project.namespace.id }
+
+ it 'uses id from projects namespace' do
+ described_class.track_event(event_name, user: user, project: project)
+
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(project.namespace)
+ end
+ end
+
+ context 'when arguments are invalid' do
+ context 'when user is not an instance of User' do
+ let(:user) { 'a_string' }
+
+ it_behaves_like 'an event that logs an error' do
+ let(:event_kwargs) { { user: user, project: project.id } }
+ end
+ end
+
+ context 'when project is not an instance of Project' do
+ let(:project) { 42 }
+
+ it_behaves_like 'an event that logs an error' do
+ let(:event_kwargs) { { user: user.id, project: project } }
+ end
+ end
+
+ context 'when namespace is not an instance of Namespace' do
+ let(:namespace) { false }
+
+ it_behaves_like 'an event that logs an error' do
+ let(:event_kwargs) { { user: user.id, namespace: namespace } }
+ end
+ end
+ end
it 'updates Redis, RedisHLL and Snowplow', :aggregate_failures do
- params = { user: user, project: project, namespace: namespace }
- described_class.track_event(event_name, **params)
+ described_class.track_event(event_name, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_redis_hll_tracking(event_name)
- expect_snowplow_tracking(event_name) # Add test for arguments
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking
end
it 'rescues error', :aggregate_failures do
- params = { user: user, project: project, namespace: namespace }
+ params = { user: user, project: project }
error = StandardError.new("something went wrong")
allow(fake_snowplow).to receive(:event).and_raise(error)
@@ -83,12 +190,12 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
expect { described_class.track_event('unknown_event') }.not_to raise_error
end
- it 'logs error on missing property', :aggregate_failures do
+ it 'logs warning on missing property', :aggregate_failures do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
- expect_redis_tracking(event_name)
- expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
- .with(described_class::InvalidPropertyError, event_name: event_name, kwargs: { merge_request_id: 1 })
+ expect_redis_tracking
+ expect(Gitlab::AppJsonLogger).to have_received(:warn)
+ .with(message: /should be triggered with a named parameter/)
end
context 'when unique property is missing' do
@@ -100,7 +207,7 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'logs error on missing unique property', :aggregate_failures do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
- expect_redis_tracking(event_name)
+ expect_redis_tracking
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
end
end
@@ -119,27 +226,17 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'is used when logging to RedisHLL', :aggregate_failures do
described_class.track_event(event_name, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_redis_hll_tracking(event_name)
- expect_snowplow_tracking(event_name)
+ expect_redis_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking
end
context 'when property is missing' do
it 'logs error' do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
- expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
- .with(described_class::InvalidPropertyError, event_name: event_name, kwargs: { merge_request_id: 1 })
- end
- end
-
- context 'when method does not exist on property', :aggregate_failures do
- it 'logs error on missing method' do
- expect { described_class.track_event(event_name, project: "a_string") }.not_to raise_error
-
- expect_redis_tracking(event_name)
- expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception)
- .with(described_class::InvalidMethodError, event_name: event_name, kwargs: { project: 'a_string' })
+ expect(Gitlab::AppJsonLogger).to have_received(:warn)
+ .with(message: /should be triggered with a named parameter/)
end
end
@@ -147,8 +244,8 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'logs to Redis and RedisHLL but not Snowplow' do
described_class.track_event(event_name, send_snowplow_event: false, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_redis_hll_tracking(event_name)
+ expect_redis_tracking
+ expect_redis_hll_tracking
expect(fake_snowplow).not_to have_received(:event)
end
end
@@ -166,9 +263,75 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
it 'logs to Redis and Snowplow but not RedisHLL', :aggregate_failures do
described_class.track_event(event_name, user: user, project: project)
- expect_redis_tracking(event_name)
- expect_snowplow_tracking(event_name)
+ expect_redis_tracking
+ expect_snowplow_tracking(project.namespace)
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to have_received(:track_event)
end
end
+
+ describe 'Product Analytics tracking' do
+ let(:app_id) { 'foobar' }
+ let(:url) { 'http://localhost:4000' }
+ let(:sdk_client) { instance_double('GitlabSDK::Client') }
+ let(:event_kwargs) { { user: user, project: project } }
+
+ before do
+ described_class.clear_memoization(:gitlab_sdk_client)
+
+ stub_env('GITLAB_ANALYTICS_ID', app_id)
+ stub_env('GITLAB_ANALYTICS_URL', url)
+ end
+
+ subject(:track_event) { described_class.track_event(event_name, **event_kwargs) }
+
+ shared_examples 'does not send a Product Analytics event' do
+ it 'does not call the Product Analytics Ruby SDK' do
+ expect(GitlabSDK::Client).not_to receive(:new)
+
+ track_event
+ end
+ end
+
+ context 'when internal_events_for_product_analytics FF is enabled' do
+ before do
+ stub_feature_flags(internal_events_for_product_analytics: true)
+
+ allow(GitlabSDK::Client)
+ .to receive(:new)
+ .with(app_id: app_id, host: url)
+ .and_return(sdk_client)
+ end
+
+ it 'calls Product Analytics Ruby SDK', :aggregate_failures do
+ expect(sdk_client).to receive(:identify).with(user.id)
+ expect(sdk_client).to receive(:track)
+ .with(event_name, { project_id: project.id, namespace_id: project.namespace.id })
+
+ track_event
+ end
+
+ context 'when GITLAB_ANALYTICS_ID is nil' do
+ let(:app_id) { nil }
+
+ it_behaves_like 'does not send a Product Analytics event'
+ end
+
+ context 'when GITLAB_ANALYTICS_URL is nil' do
+ let(:url) { nil }
+
+ it_behaves_like 'does not send a Product Analytics event'
+ end
+ end
+
+ context 'when internal_events_for_product_analytics FF is disabled' do
+ let(:app_id) { 'foobar' }
+ let(:url) { 'http://localhost:4000' }
+
+ before do
+ stub_feature_flags(internal_events_for_product_analytics: false)
+ end
+
+ it_behaves_like 'does not send a Product Analytics event'
+ end
+ end
end
diff --git a/spec/lib/gitlab/issuables_count_for_state_spec.rb b/spec/lib/gitlab/issuables_count_for_state_spec.rb
index cc4ebba863d..e85dc890cbf 100644
--- a/spec/lib/gitlab/issuables_count_for_state_spec.rb
+++ b/spec/lib/gitlab/issuables_count_for_state_spec.rb
@@ -72,7 +72,6 @@ RSpec.describe Gitlab::IssuablesCountForState do
let_it_be(:group) { create(:group) }
let(:cache_options) { { expires_in: 1.hour } }
- let(:cache_key) { ['group', group.id, 'issues'] }
let(:threshold) { described_class::THRESHOLD }
let(:states_count) { { opened: 1, closed: 1, all: 2 } }
let(:params) { {} }
@@ -95,9 +94,7 @@ RSpec.describe Gitlab::IssuablesCountForState do
end
end
- context 'with Issues' do
- let(:finder) { IssuesFinder.new(user, params) }
-
+ shared_examples 'calculating counts for issuables' do
it 'returns -1 for the requested state' do
allow(finder).to receive(:count_by_state).and_raise(ActiveRecord::QueryCanceled)
expect(Rails.cache).not_to receive(:write)
@@ -162,6 +159,20 @@ RSpec.describe Gitlab::IssuablesCountForState do
end
end
+ context 'with Issues' do
+ let(:finder) { IssuesFinder.new(user, params) }
+ let(:cache_key) { ['group', group.id, 'issues'] }
+
+ it_behaves_like 'calculating counts for issuables'
+ end
+
+ context 'with Work Items' do
+ let(:finder) { ::WorkItems::WorkItemsFinder.new(user, params) }
+ let(:cache_key) { ['group', group.id, 'work_items'] }
+
+ it_behaves_like 'calculating counts for issuables'
+ end
+
context 'with Merge Requests' do
let(:finder) { MergeRequestsFinder.new(user, params) }
diff --git a/spec/lib/gitlab/kas/client_spec.rb b/spec/lib/gitlab/kas/client_spec.rb
index e8884ce352f..f2745d940de 100644
--- a/spec/lib/gitlab/kas/client_spec.rb
+++ b/spec/lib/gitlab/kas/client_spec.rb
@@ -45,25 +45,25 @@ RSpec.describe Gitlab::Kas::Client do
expect(token).to receive(:audience=).with(described_class::JWT_AUDIENCE)
end
- describe '#get_connected_agents' do
+ describe '#get_connected_agents_by_agent_ids' do
let(:stub) { instance_double(Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub) }
- let(:request) { instance_double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsRequest) }
- let(:response) { double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsResponse, agents: connected_agents) }
+ let(:request) { instance_double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIdsRequest) }
+ let(:response) { double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIdsResponse, agents: connected_agents) }
let(:connected_agents) { [double] }
- subject { described_class.new.get_connected_agents(project: project) }
+ subject { described_class.new.get_connected_agents_by_agent_ids(agent_ids: [agent.id]) }
before do
expect(Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub).to receive(:new)
.with('example.kas.internal', :this_channel_is_insecure, timeout: described_class::TIMEOUT)
.and_return(stub)
- expect(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsRequest).to receive(:new)
- .with(project_id: project.id)
+ expect(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIdsRequest).to receive(:new)
+ .with(agent_ids: [agent.id])
.and_return(request)
- expect(stub).to receive(:get_connected_agents)
+ expect(stub).to receive(:get_connected_agents_by_agent_ids)
.with(request, metadata: { 'authorization' => 'bearer test-token' })
.and_return(response)
end
diff --git a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
index 07a87b245c2..3bda8dfc52e 100644
--- a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Store, :clean_gitlab_redis_cache do
results = described_class.bulk_read([storable])
- expect(results[storable.cache_key].value.symbolize_keys)
+ expect(results[storable.cache_key].symbolize_keys)
.to eq(field_1_html: "hello", field_2_html: "world", cached_markdown_version: "1")
end
end
diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb
index c442208617f..9f97ef99a5d 100644
--- a/spec/lib/gitlab/memory/watchdog_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog_spec.rb
@@ -178,19 +178,6 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category:
watchdog.call
end
- context 'when enforce_memory_watchdog ops toggle is off' do
- before do
- stub_feature_flags(enforce_memory_watchdog: false)
- end
-
- it 'always uses the NullHandler' do
- expect(handler).not_to receive(:call)
- expect(described_class::Handlers::NullHandler.instance).to receive(:call).and_return(true)
-
- watchdog.call
- end
- end
-
context 'when multiple monitors exceeds allowed number of strikes' do
before do
watchdog.configure do |config|
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
deleted file mode 100644
index e4f53ab3f49..00000000000
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ /dev/null
@@ -1,363 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Metrics::System do
- context 'when /proc files exist' do
- # Modified column 22 to be 1000 (starttime ticks)
- let(:proc_stat) do
- <<~SNIP
- 2095 (ruby) R 0 2095 2095 34818 2095 4194560 211267 7897 2 0 287 51 10 1 20 0 5 0 1000 566210560 80885 18446744073709551615 94736211292160 94736211292813 140720919612064 0 0 0 0 0 1107394127 0 0 0 17 3 0 0 0 0 0 94736211303768 94736211304544 94736226689024 140720919619473 140720919619513 140720919619513 140720919621604 0
- SNIP
- end
-
- # Fixtures pulled from:
- # Linux carbon 5.3.0-7648-generic #41~1586789791~19.10~9593806-Ubuntu SMP Mon Apr 13 17:50:40 UTC x86_64 x86_64 x86_64 GNU/Linux
- let(:proc_status) do
- # most rows omitted for brevity
- <<~SNIP
- Name: less
- VmHWM: 2468 kB
- VmRSS: 2468 kB
- RssAnon: 260 kB
- RssFile: 1024 kB
- SNIP
- end
-
- let(:proc_smaps_rollup) do
- # full snapshot
- <<~SNIP
- Rss: 2564 kB
- Pss: 503 kB
- Pss_Anon: 312 kB
- Pss_File: 191 kB
- Pss_Shmem: 0 kB
- Shared_Clean: 2100 kB
- Shared_Dirty: 0 kB
- Private_Clean: 152 kB
- Private_Dirty: 312 kB
- Referenced: 2564 kB
- Anonymous: 312 kB
- LazyFree: 0 kB
- AnonHugePages: 0 kB
- ShmemPmdMapped: 0 kB
- Shared_Hugetlb: 0 kB
- Private_Hugetlb: 0 kB
- Swap: 0 kB
- SwapPss: 0 kB
- Locked: 0 kB
- SNIP
- end
-
- let(:proc_limits) do
- # full snapshot
- <<~SNIP
- Limit Soft Limit Hard Limit Units
- Max cpu time unlimited unlimited seconds
- Max file size unlimited unlimited bytes
- Max data size unlimited unlimited bytes
- Max stack size 8388608 unlimited bytes
- Max core file size 0 unlimited bytes
- Max resident set unlimited unlimited bytes
- Max processes 126519 126519 processes
- Max open files 1024 1048576 files
- Max locked memory 67108864 67108864 bytes
- Max address space unlimited unlimited bytes
- Max file locks unlimited unlimited locks
- Max pending signals 126519 126519 signals
- Max msgqueue size 819200 819200 bytes
- Max nice priority 0 0
- Max realtime priority 0 0
- Max realtime timeout unlimited unlimited us
- SNIP
- end
-
- let(:mem_info) do
- # full snapshot
- <<~SNIP
- MemTotal: 15362536 kB
- MemFree: 3403136 kB
- MemAvailable: 13044528 kB
- Buffers: 272188 kB
- Cached: 8171312 kB
- SwapCached: 0 kB
- Active: 3332084 kB
- Inactive: 6981076 kB
- Active(anon): 1603868 kB
- Inactive(anon): 9044 kB
- Active(file): 1728216 kB
- Inactive(file): 6972032 kB
- Unevictable: 18676 kB
- Mlocked: 18676 kB
- SwapTotal: 0 kB
- SwapFree: 0 kB
- Dirty: 6808 kB
- Writeback: 0 kB
- AnonPages: 1888300 kB
- Mapped: 166164 kB
- Shmem: 12932 kB
- KReclaimable: 1275120 kB
- Slab: 1495480 kB
- SReclaimable: 1275120 kB
- SUnreclaim: 220360 kB
- KernelStack: 7072 kB
- PageTables: 11936 kB
- NFS_Unstable: 0 kB
- Bounce: 0 kB
- WritebackTmp: 0 kB
- CommitLimit: 7681268 kB
- Committed_AS: 4976100 kB
- VmallocTotal: 34359738367 kB
- VmallocUsed: 25532 kB
- VmallocChunk: 0 kB
- Percpu: 23200 kB
- HardwareCorrupted: 0 kB
- AnonHugePages: 202752 kB
- ShmemHugePages: 0 kB
- ShmemPmdMapped: 0 kB
- FileHugePages: 0 kB
- FilePmdMapped: 0 kB
- CmaTotal: 0 kB
- CmaFree: 0 kB
- HugePages_Total: 0
- HugePages_Free: 0
- HugePages_Rsvd: 0
- HugePages_Surp: 0
- Hugepagesize: 2048 kB
- Hugetlb: 0 kB
- DirectMap4k: 4637504 kB
- DirectMap2M: 11087872 kB
- DirectMap1G: 2097152 kB
- SNIP
- end
-
- describe '.memory_usage_rss' do
- context 'without PID' do
- it "returns a hash containing RSS metrics in bytes for current process" do
- mock_existing_proc_file('/proc/self/status', proc_status)
-
- expect(described_class.memory_usage_rss).to eq(
- total: 2527232,
- anon: 266240,
- file: 1048576
- )
- end
- end
-
- context 'with PID' do
- it "returns a hash containing RSS metrics in bytes for given process" do
- mock_existing_proc_file('/proc/7/status', proc_status)
-
- expect(described_class.memory_usage_rss(pid: 7)).to eq(
- total: 2527232,
- anon: 266240,
- file: 1048576
- )
- end
- end
- end
-
- describe '.file_descriptor_count' do
- it 'returns the amount of open file descriptors' do
- expect(Dir).to receive(:glob).and_return(['/some/path', '/some/other/path'])
-
- expect(described_class.file_descriptor_count).to eq(2)
- end
- end
-
- describe '.max_open_file_descriptors' do
- it 'returns the max allowed open file descriptors' do
- mock_existing_proc_file('/proc/self/limits', proc_limits)
-
- expect(described_class.max_open_file_descriptors).to eq(1024)
- end
- end
-
- describe '.memory_usage_uss_pss' do
- context 'without PID' do
- it "returns the current process' unique and porportional set size (USS/PSS) in bytes" do
- mock_existing_proc_file('/proc/self/smaps_rollup', proc_smaps_rollup)
-
- # (Private_Clean (152 kB) + Private_Dirty (312 kB) + Private_Hugetlb (0 kB)) * 1024
- expect(described_class.memory_usage_uss_pss).to eq(uss: 475136, pss: 515072)
- end
- end
-
- context 'with PID' do
- it "returns the given process' unique and porportional set size (USS/PSS) in bytes" do
- mock_existing_proc_file('/proc/7/smaps_rollup', proc_smaps_rollup)
-
- # (Private_Clean (152 kB) + Private_Dirty (312 kB) + Private_Hugetlb (0 kB)) * 1024
- expect(described_class.memory_usage_uss_pss(pid: 7)).to eq(uss: 475136, pss: 515072)
- end
- end
- end
-
- describe '.memory_total' do
- it "returns the current process' resident set size (RSS) in bytes" do
- mock_existing_proc_file('/proc/meminfo', mem_info)
-
- expect(described_class.memory_total).to eq(15731236864)
- end
- end
-
- describe '.process_runtime_elapsed_seconds' do
- it 'returns the seconds elapsed since the process was started' do
- # sets process starttime ticks to 1000
- mock_existing_proc_file('/proc/self/stat', proc_stat)
- # system clock ticks/sec
- expect(Etc).to receive(:sysconf).with(Etc::SC_CLK_TCK).and_return(100)
- # system uptime in seconds
- expect(::Process).to receive(:clock_gettime).and_return(15)
-
- # uptime - (starttime_ticks / ticks_per_sec)
- expect(described_class.process_runtime_elapsed_seconds).to eq(5)
- end
-
- context 'when inputs are not available' do
- it 'returns 0' do
- mock_missing_proc_file
- expect(::Process).to receive(:clock_gettime).and_raise(NameError)
-
- expect(described_class.process_runtime_elapsed_seconds).to eq(0)
- end
- end
- end
-
- describe '.summary' do
- it 'contains a selection of the available fields' do
- stub_const('RUBY_DESCRIPTION', 'ruby-3.0-patch1')
- mock_existing_proc_file('/proc/self/status', proc_status)
- mock_existing_proc_file('/proc/self/smaps_rollup', proc_smaps_rollup)
-
- summary = described_class.summary
-
- expect(summary[:version]).to eq('ruby-3.0-patch1')
- expect(summary[:gc_stat].keys).to eq(GC.stat.keys)
- expect(summary[:memory_rss]).to eq(2527232)
- expect(summary[:memory_uss]).to eq(475136)
- expect(summary[:memory_pss]).to eq(515072)
- expect(summary[:time_cputime]).to be_a(Float)
- expect(summary[:time_realtime]).to be_a(Float)
- expect(summary[:time_monotonic]).to be_a(Float)
- end
- end
- end
-
- context 'when /proc files do not exist' do
- before do
- mock_missing_proc_file
- end
-
- describe '.memory_usage_rss' do
- it 'returns 0 for all components' do
- expect(described_class.memory_usage_rss).to eq(
- total: 0,
- anon: 0,
- file: 0
- )
- end
- end
-
- describe '.memory_usage_uss_pss' do
- it "returns 0 for all components" do
- expect(described_class.memory_usage_uss_pss).to eq(uss: 0, pss: 0)
- end
- end
-
- describe '.file_descriptor_count' do
- it 'returns 0' do
- expect(Dir).to receive(:glob).and_return([])
-
- expect(described_class.file_descriptor_count).to eq(0)
- end
- end
-
- describe '.max_open_file_descriptors' do
- it 'returns 0' do
- expect(described_class.max_open_file_descriptors).to eq(0)
- end
- end
-
- describe '.summary' do
- it 'returns only available fields' do
- summary = described_class.summary
-
- expect(summary[:version]).to be_a(String)
- expect(summary[:gc_stat].keys).to eq(GC.stat.keys)
- expect(summary[:memory_rss]).to eq(0)
- expect(summary[:memory_uss]).to eq(0)
- expect(summary[:memory_pss]).to eq(0)
- expect(summary[:time_cputime]).to be_a(Float)
- expect(summary[:time_realtime]).to be_a(Float)
- expect(summary[:time_monotonic]).to be_a(Float)
- end
- end
- end
-
- describe '.cpu_time' do
- it 'returns a Float' do
- expect(described_class.cpu_time).to be_an(Float)
- end
- end
-
- describe '.real_time' do
- it 'returns a Float' do
- expect(described_class.real_time).to be_an(Float)
- end
- end
-
- describe '.monotonic_time' do
- it 'returns a Float' do
- expect(described_class.monotonic_time).to be_an(Float)
- end
- end
-
- describe '.thread_cpu_time' do
- it 'returns cpu_time on supported platform' do
- stub_const("Process::CLOCK_THREAD_CPUTIME_ID", 16)
-
- expect(Process).to receive(:clock_gettime)
- .with(16, kind_of(Symbol)) { 0.111222333 }
-
- expect(described_class.thread_cpu_time).to eq(0.111222333)
- end
-
- it 'returns nil on unsupported platform' do
- hide_const("Process::CLOCK_THREAD_CPUTIME_ID")
-
- expect(described_class.thread_cpu_time).to be_nil
- end
- end
-
- describe '.thread_cpu_duration' do
- let(:start_time) { described_class.thread_cpu_time }
-
- it 'returns difference between start and current time' do
- stub_const("Process::CLOCK_THREAD_CPUTIME_ID", 16)
-
- expect(Process).to receive(:clock_gettime)
- .with(16, kind_of(Symbol))
- .and_return(
- 0.111222333,
- 0.222333833
- )
-
- expect(described_class.thread_cpu_duration(start_time)).to eq(0.1111115)
- end
-
- it 'returns nil on unsupported platform' do
- hide_const("Process::CLOCK_THREAD_CPUTIME_ID")
-
- expect(described_class.thread_cpu_duration(start_time)).to be_nil
- end
- end
-
- def mock_existing_proc_file(path, content)
- allow(File).to receive(:open).with(path) { |_path, &block| block.call(StringIO.new(content)) }
- end
-
- def mock_missing_proc_file
- allow(File).to receive(:open).and_raise(Errno::ENOENT)
- end
-end
diff --git a/spec/lib/gitlab/middleware/path_traversal_check_spec.rb b/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
index 91081cc88ea..a0d7711c881 100644
--- a/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
+++ b/spec/lib/gitlab/middleware/path_traversal_check_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
- let(:fake_response) { [200, { 'Content-Type' => 'text/plain' }, ['OK']] }
+ let(:fake_response_status) { 200 }
+ let(:fake_response) { [fake_response_status, { 'Content-Type' => 'text/plain' }, ['OK']] }
let(:fake_app) { ->(_) { fake_response } }
let(:middleware) { described_class.new(fake_app) }
@@ -31,8 +32,11 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
.and_call_original
expect(::Gitlab::AppLogger)
.to receive(:warn)
- .with({ class_name: described_class.name, duration_ms: instance_of(Float) })
- .and_call_original
+ .with({
+ class_name: described_class.name,
+ duration_ms: instance_of(Float),
+ status: fake_response_status
+ }).and_call_original
expect(subject).to eq(fake_response)
end
@@ -61,8 +65,11 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
.not_to receive(:check_path_traversal!)
expect(::Gitlab::AppLogger)
.to receive(:warn)
- .with({ class_name: described_class.name, duration_ms: instance_of(Float) })
- .and_call_original
+ .with({
+ class_name: described_class.name,
+ duration_ms: instance_of(Float),
+ status: fake_response_status
+ }).and_call_original
expect(subject).to eq(fake_response)
end
@@ -99,7 +106,8 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
duration_ms: instance_of(Float),
message: described_class::PATH_TRAVERSAL_MESSAGE,
fullpath: fullpath,
- method: method.upcase
+ method: method.upcase,
+ status: fake_response_status
}).and_call_original
expect(subject).to eq(fake_response)
@@ -124,7 +132,8 @@ RSpec.describe ::Gitlab::Middleware::PathTraversalCheck, feature_category: :shar
class_name: described_class.name,
message: described_class::PATH_TRAVERSAL_MESSAGE,
fullpath: fullpath,
- method: method.upcase
+ method: method.upcase,
+ status: fake_response_status
}).and_call_original
expect(subject).to eq(fake_response)
diff --git a/spec/lib/gitlab/middleware/request_context_spec.rb b/spec/lib/gitlab/middleware/request_context_spec.rb
index cd21209bcee..a0a6609c8bb 100644
--- a/spec/lib/gitlab/middleware/request_context_spec.rb
+++ b/spec/lib/gitlab/middleware/request_context_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
require 'rack'
require 'request_store'
-require_relative '../../../support/helpers/next_instance_of'
+require 'gitlab/rspec/next_instance_of'
RSpec.describe Gitlab::Middleware::RequestContext, feature_category: :application_instrumentation do
include NextInstanceOf
diff --git a/spec/lib/gitlab/nav/top_nav_menu_header_spec.rb b/spec/lib/gitlab/nav/top_nav_menu_header_spec.rb
deleted file mode 100644
index d9da3ba1e46..00000000000
--- a/spec/lib/gitlab/nav/top_nav_menu_header_spec.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe ::Gitlab::Nav::TopNavMenuHeader do
- describe '.build' do
- it 'builds a hash from with the given header' do
- title = 'Test Header'
- expected = {
- title: title,
- type: :header
- }
- expect(described_class.build(title: title)).to eq(expected)
- end
- end
-end
diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb
index 9b46b8eccc8..222a730a229 100644
--- a/spec/lib/gitlab/omniauth_initializer_spec.rb
+++ b/spec/lib/gitlab/omniauth_initializer_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::OmniauthInitializer do
+RSpec.describe Gitlab::OmniauthInitializer, feature_category: :system_access do
+ include LoginHelpers
+
let(:devise_config) { class_double(Devise) }
subject(:initializer) { described_class.new(devise_config) }
@@ -224,6 +226,119 @@ RSpec.describe Gitlab::OmniauthInitializer do
subject.execute([shibboleth_config])
end
+ context 'when SAML providers are configured' do
+ it 'configures default args for a single SAML provider' do
+ stub_omniauth_config(providers: [{ name: 'saml', args: { idp_sso_service_url: 'https://saml.example.com' } }])
+
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+
+ context 'when configuration provides matching keys' do
+ before do
+ stub_omniauth_config(
+ providers: [
+ {
+ name: 'saml',
+ args: { idp_sso_service_url: 'https://saml.example.com', attribute_statements: { email: ['custom_attr'] } }
+ }
+ ]
+ )
+ end
+
+ it 'merges arguments with user configuration preference' do
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ .merge({ email: ['custom_attr'] })
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+
+ it 'merges arguments with defaults preference when REVERT_OMNIAUTH_DEFAULT_MERGING is true' do
+ stub_env('REVERT_OMNIAUTH_DEFAULT_MERGING', 'true')
+
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+ end
+
+ it 'configures defaults args for multiple SAML providers' do
+ stub_omniauth_config(
+ providers: [
+ { name: 'saml', args: { idp_sso_service_url: 'https://saml.example.com' } },
+ {
+ name: 'saml2',
+ args: { strategy_class: 'OmniAuth::Strategies::SAML', idp_sso_service_url: 'https://saml2.example.com' }
+ }
+ ]
+ )
+
+ expect(devise_config).to receive(:omniauth).with(
+ :saml,
+ {
+ idp_sso_service_url: 'https://saml.example.com',
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+ expect(devise_config).to receive(:omniauth).with(
+ :saml2,
+ {
+ idp_sso_service_url: 'https://saml2.example.com',
+ strategy_class: OmniAuth::Strategies::SAML,
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+
+ it 'merges arguments with user configuration preference for custom SAML provider' do
+ stub_omniauth_config(
+ providers: [
+ {
+ name: 'custom_saml',
+ args: {
+ strategy_class: 'OmniAuth::Strategies::SAML',
+ idp_sso_service_url: 'https://saml2.example.com',
+ attribute_statements: { email: ['custom_attr'] }
+ }
+ }
+ ]
+ )
+
+ expect(devise_config).to receive(:omniauth).with(
+ :custom_saml,
+ {
+ idp_sso_service_url: 'https://saml2.example.com',
+ strategy_class: OmniAuth::Strategies::SAML,
+ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
+ .merge({ email: ['custom_attr'] })
+ }
+ )
+
+ initializer.execute(Gitlab.config.omniauth.providers)
+ end
+ end
+
it 'configures defaults for google_oauth2' do
google_config = {
'name' => 'google_oauth2',
diff --git a/spec/lib/gitlab/pages/deployment_update_spec.rb b/spec/lib/gitlab/pages/deployment_update_spec.rb
index 9a7564ddd59..48f5b27b761 100644
--- a/spec/lib/gitlab/pages/deployment_update_spec.rb
+++ b/spec/lib/gitlab/pages/deployment_update_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Pages::DeploymentUpdate, feature_category: :pages do
let_it_be(:project, refind: true) { create(:project, :repository) }
- let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+ let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD~~').sha) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD~').sha) }
let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }
@@ -137,4 +137,35 @@ RSpec.describe Gitlab::Pages::DeploymentUpdate, feature_category: :pages do
expect(pages_deployment_update).to be_valid
end
end
+
+ context 'when validating if current build is outdated' do
+ before do
+ create(:ci_job_artifact, :correct_checksum, file: file, job: build)
+ create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)
+ build.reload
+ end
+
+ context 'when there is NOT a newer build' do
+ it 'does not fail' do
+ expect(pages_deployment_update).to be_valid
+ end
+ end
+
+ context 'when there is a newer build' do
+ before do
+ new_pipeline = create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha)
+ new_build = create(:ci_build, name: 'pages', pipeline: new_pipeline, ref: 'HEAD')
+ create(:ci_job_artifact, :correct_checksum, file: file, job: new_build)
+ create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: new_build)
+ create(:pages_deployment, project: project, ci_build: new_build)
+ new_build.reload
+ end
+
+ it 'fails with outdated reference message' do
+ expect(pages_deployment_update).not_to be_valid
+ expect(pages_deployment_update.errors.full_messages)
+ .to include('build SHA is outdated for this ref')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/pages/url_builder_spec.rb b/spec/lib/gitlab/pages/url_builder_spec.rb
index ae94bbadffe..1a97ca01c3e 100644
--- a/spec/lib/gitlab/pages/url_builder_spec.rb
+++ b/spec/lib/gitlab/pages/url_builder_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
let(:project_public) { true }
let(:unique_domain) { 'unique-domain' }
let(:unique_domain_enabled) { false }
+ let(:namespace_in_path) { false }
let(:project_setting) do
instance_double(
@@ -43,7 +44,8 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
protocol: 'http',
artifacts_server: artifacts_server,
access_control: access_control,
- port: port
+ port: port,
+ namespace_in_path: namespace_in_path
)
end
@@ -52,63 +54,131 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to eq('http://group.example.com/project') }
- context 'when namespace is upper cased' do
- let(:full_path) { 'Group/project' }
+ context 'when namespace_in_path is false' do
+ let(:namespace_in_path) { false }
- it { is_expected.to eq('http://group.example.com/project') }
- end
+ context 'when namespace is upper cased' do
+ let(:full_path) { 'Group/project' }
- context 'when project is in a nested group page' do
- let(:full_path) { 'group/subgroup/project' }
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
- it { is_expected.to eq('http://group.example.com/subgroup/project') }
- end
+ context 'when project is in a nested group page' do
+ let(:full_path) { 'group/subgroup/project' }
+
+ it { is_expected.to eq('http://group.example.com/subgroup/project') }
+ end
+
+ context 'when using domain pages' do
+ let(:full_path) { 'group/group.example.com' }
+
+ it { is_expected.to eq('http://group.example.com') }
+
+ context 'in development mode' do
+ let(:port) { 3010 }
+
+ before do
+ stub_rails_env('development')
+ end
+
+ it { is_expected.to eq('http://group.example.com:3010') }
+ end
+ end
+
+ context 'when not using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
- context 'when using domain pages' do
- let(:full_path) { 'group/group.example.com' }
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
- it { is_expected.to eq('http://group.example.com') }
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
- context 'in development mode' do
- let(:port) { 3010 }
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
- before do
- stub_rails_env('development')
+ it { is_expected.to eq('http://group.example.com/project') }
end
+ end
+
+ context 'when using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
+
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
+
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
+
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
- it { is_expected.to eq('http://group.example.com:3010') }
+ it { is_expected.to eq('http://unique-domain.example.com') }
+ end
end
end
- context 'when not using pages_unique_domain' do
- subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
+ context 'when namespace_in_path is true' do
+ let(:namespace_in_path) { true }
- context 'when pages_unique_domain_enabled is false' do
- let(:unique_domain_enabled) { false }
+ context 'when namespace is upper cased' do
+ let(:full_path) { 'Group/project' }
- it { is_expected.to eq('http://group.example.com/project') }
+ it { is_expected.to eq('http://example.com/group/project') }
end
- context 'when pages_unique_domain_enabled is true' do
- let(:unique_domain_enabled) { true }
+ context 'when project is in a nested group page' do
+ let(:full_path) { 'group/subgroup/project' }
- it { is_expected.to eq('http://group.example.com/project') }
+ it { is_expected.to eq('http://example.com/group/subgroup/project') }
end
- end
- context 'when using pages_unique_domain' do
- subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
+ context 'when using domain pages' do
+ let(:full_path) { 'group/group.example.com' }
- context 'when pages_unique_domain_enabled is false' do
- let(:unique_domain_enabled) { false }
+ it { is_expected.to eq('http://example.com/group/group.example.com') }
- it { is_expected.to eq('http://group.example.com/project') }
+ context 'in development mode' do
+ let(:port) { 3010 }
+
+ before do
+ stub_rails_env('development')
+ end
+
+ it { is_expected.to eq('http://example.com:3010/group/group.example.com') }
+ end
+ end
+
+ context 'when not using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: false) }
+
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
+
+ it { is_expected.to eq('http://example.com/group/project') }
+ end
+
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
+
+ it { is_expected.to eq('http://example.com/group/project') }
+ end
end
- context 'when pages_unique_domain_enabled is true' do
- let(:unique_domain_enabled) { true }
+ context 'when using pages_unique_domain' do
+ subject(:pages_url) { builder.pages_url(with_unique_domain: true) }
- it { is_expected.to eq('http://unique-domain.example.com') }
+ context 'when pages_unique_domain_enabled is false' do
+ let(:unique_domain_enabled) { false }
+
+ it { is_expected.to eq('http://example.com/group/project') }
+ end
+
+ context 'when pages_unique_domain_enabled is true' do
+ let(:unique_domain_enabled) { true }
+
+ it { is_expected.to eq('http://example.com/unique-domain') }
+ end
end
end
end
@@ -122,6 +192,12 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to be_nil }
end
+ context 'when namespace_in_path is true' do
+ let(:namespace_in_path) { true }
+
+ it { is_expected.to be_nil }
+ end
+
context 'when pages_unique_domain_enabled is true' do
let(:unique_domain_enabled) { true }
@@ -157,6 +233,19 @@ RSpec.describe Gitlab::Pages::UrlBuilder, feature_category: :pages do
it { is_expected.to eq("http://group.example.com:1234/-/project/-/jobs/1/artifacts/path/file.txt") }
end
end
+
+ context 'with namespace_in_path enabled and allowed extension' do
+ let(:artifact_name) { 'file.txt' }
+ let(:namespace_in_path) { true }
+
+ it { is_expected.to eq("http://example.com/group/-/project/-/jobs/1/artifacts/path/file.txt") }
+
+ context 'when port is configured' do
+ let(:port) { 1234 }
+
+ it { is_expected.to eq("http://example.com:1234/group/-/project/-/jobs/1/artifacts/path/file.txt") }
+ end
+ end
end
describe '#artifact_url_available?' do
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index e5958549a81..009c7299e9e 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -6,24 +6,24 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
subject { described_class }
describe '.available_for_type?' do
- it 'returns true for Group' do
- expect(subject.available_for_type?(Group.all)).to be_truthy
- end
+ it 'returns true for when class implements .supported_keyset_orderings' do
+ model = Class.new(ApplicationRecord) do
+ self.table_name = 'users'
- it 'returns true for Ci::Build' do
- expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
- end
+ def self.supported_keyset_orderings
+ { id: [:desc] }
+ end
+ end
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
+ expect(subject.available_for_type?(model.all)).to eq(true)
end
- it 'returns true for User' do
- expect(subject.available_for_type?(User.all)).to be_truthy
- end
+ it 'return false when class does not implement .supported_keyset_orderings' do
+ model = Class.new(ApplicationRecord) do
+ self.table_name = 'users'
+ end
- it 'return false for other types of relations' do
- expect(subject.available_for_type?(Issue.all)).to be_falsey
+ expect(subject.available_for_type?(model.all)).to eq(false)
end
end
@@ -68,53 +68,54 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
describe '.available?' do
let(:request_context) { double('request_context', params: { order_by: order_by, sort: sort }) }
let(:cursor_based_request_context) { Gitlab::Pagination::Keyset::CursorBasedRequestContext.new(request_context) }
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'users'
- context 'with order-by name asc' do
- let(:order_by) { :name }
- let(:sort) { :asc }
-
- it 'returns true for Group' do
- expect(subject.available?(cursor_based_request_context, Group.all)).to be_truthy
- end
-
- it 'return false for other types of relations' do
- expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
- expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
- expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey
+ def self.supported_keyset_orderings
+ { id: [:desc] }
+ end
end
end
- context 'with order-by id desc' do
+ context 'when param order is supported by the model' do
let(:order_by) { :id }
let(:sort) { :desc }
- it 'returns true for Ci::Build' do
- expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
+ it 'returns true' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(true)
end
+ end
- it 'returns true for AuditEvent' do
- expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
- end
+ context 'when sort param is not supported by the model' do
+ let(:order_by) { :id }
+ let(:sort) { :asc }
- it 'returns true for Packages::BuildInfo' do
- expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
+ it 'returns false' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
end
+ end
+
+ context 'when order_by params is not supported by the model' do
+ let(:order_by) { :name }
+ let(:sort) { :desc }
- it 'returns true for User' do
- expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy
+ it 'returns false' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
end
end
- context 'with other order-by columns' do
- let(:order_by) { :path }
- let(:sort) { :asc }
-
- it 'returns false for Group' do
- expect(subject.available?(cursor_based_request_context, Group.all)).to be_falsey
+ context 'when model does not implement .supported_keyset_orderings' do
+ let(:order_by) { :id }
+ let(:sort) { :desc }
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'users'
+ end
end
- it 'return false for other types of relations' do
- expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
+ it 'returns false' do
+ expect(subject.available?(cursor_based_request_context, model.all)).to eq(false)
end
end
end
diff --git a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
index cb3f1fe86dc..914c1e7bb74 100644
--- a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
+++ b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
+RSpec.describe Gitlab::Pagination::GitalyKeysetPager, feature_category: :source_code_management do
let(:pager) { described_class.new(request_context, project) }
let_it_be(:project) { create(:project, :repository) }
@@ -101,12 +101,17 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
allow(request_context).to receive(:request).and_return(fake_request)
allow(BranchesFinder).to receive(:===).with(finder).and_return(true)
expect(finder).to receive(:execute).with(gitaly_pagination: true).and_return(branches)
+ allow(finder).to receive(:next_cursor)
end
context 'when next page could be available' do
let(:branches) { [branch1, branch2] }
+ let(:next_cursor) { branch2.name }
+ let(:expected_next_page_link) { %(<#{incoming_api_projects_url}?#{query.merge(page_token: next_cursor).to_query}>; rel="next") }
- let(:expected_next_page_link) { %(<#{incoming_api_projects_url}?#{query.merge(page_token: branch2.name).to_query}>; rel="next") }
+ before do
+ allow(finder).to receive(:next_cursor).and_return(next_cursor)
+ end
it 'uses keyset pagination and adds link headers' do
expect(request_context).to receive(:header).with('Link', expected_next_page_link)
diff --git a/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb b/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb
deleted file mode 100644
index cd3718f5dcc..00000000000
--- a/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Patch::SidekiqScheduledEnq, :clean_gitlab_redis_queues, feature_category: :scalability do
- describe '#enqueue_jobs' do
- let_it_be(:payload) { {} }
-
- before do
- allow(Sidekiq).to receive(:load_json).and_return(payload)
-
- # stub data in both namespaces
- Gitlab::Redis::Queues.with { |c| c.zadd('resque:gitlab:schedule', 100, 'dummy') }
- Gitlab::Redis::Queues.with { |c| c.zadd('schedule', 100, 'dummy') }
- end
-
- subject { Sidekiq::Scheduled::Enq.new.enqueue_jobs }
-
- it 'polls both namespaces by default' do
- expect(Sidekiq::Client).to receive(:push).with(payload).twice
-
- subject
-
- Sidekiq.redis do |conn|
- expect(conn.zcard('schedule')).to eq(0)
- end
-
- Gitlab::Redis::Queues.with do |conn|
- expect(conn.zcard('resque:gitlab:schedule')).to eq(0)
- end
- end
-
- context 'when SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING is disabled' do
- before do
- stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'false')
- end
-
- it 'polls via Sidekiq.redis only' do
- expect(Sidekiq::Client).to receive(:push).with(payload).once
-
- subject
-
- Sidekiq.redis do |conn|
- expect(conn.zcard('schedule')).to eq(0)
- end
-
- Gitlab::Redis::Queues.with do |conn|
- expect(conn.zcard('resque:gitlab:schedule')).to eq(1)
- end
- end
- end
-
- context 'when SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING is enabled' do
- before do
- stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'true')
- end
-
- it 'polls both sets' do
- expect(Sidekiq::Client).to receive(:push).with(payload).twice
-
- subject
-
- Sidekiq.redis do |conn|
- expect(conn.zcard('schedule')).to eq(0)
- end
-
- Gitlab::Redis::Queues.with do |conn|
- expect(conn.zcard('resque:gitlab:schedule')).to eq(0)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/puma/error_handler_spec.rb b/spec/lib/gitlab/puma/error_handler_spec.rb
index 5b7cdf37af1..bfcbf32e899 100644
--- a/spec/lib/gitlab/puma/error_handler_spec.rb
+++ b/spec/lib/gitlab/puma/error_handler_spec.rb
@@ -12,11 +12,10 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
describe '#execute' do
it 'captures the exception and returns a Rack response' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
- expect(Raven).to receive(:capture_exception).with(
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
ex,
- tags: { handler: 'puma_low_level' },
- extra: { puma_env: env, status_code: status_code }
+ { puma_env: env, status_code: status_code },
+ { handler: 'puma_low_level' }
).and_call_original
status, headers, message = subject.execute(ex, env, status_code)
@@ -26,25 +25,10 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
end
- context 'when capture is not allowed' do
- it 'returns a Rack response without capturing the exception' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(false)
- expect(Raven).not_to receive(:capture_exception)
-
- status, headers, message = subject.execute(ex, env, status_code)
-
- expect(status).to eq(500)
- expect(headers).to eq({})
- expect(message).to eq(described_class::PROD_ERROR_MESSAGE)
- end
- end
-
context 'when not in production' do
let(:is_production) { false }
it 'returns a Rack response with dev error message' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
-
status, headers, message = subject.execute(ex, env, status_code)
expect(status).to eq(500)
@@ -57,9 +41,6 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
let(:status_code) { 500 }
it 'defaults to error 500' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(false)
- expect(Raven).not_to receive(:capture_exception)
-
status, headers, message = subject.execute(ex, env, status_code)
expect(status).to eq(500)
@@ -72,8 +53,6 @@ RSpec.describe Gitlab::Puma::ErrorHandler, feature_category: :shared do
let(:status_code) { 404 }
it 'uses the provided status code in the response' do
- allow(Raven.configuration).to receive(:capture_allowed?).and_return(true)
-
status, headers, message = subject.execute(ex, env, status_code)
expect(status).to eq(404)
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index 063b416c514..bb0adbc87f1 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -3,13 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
+
let(:definitions) do
Class.new do
include Gitlab::QuickActions::Dsl
- command(:reopen, :open) {}
+ command(:reopen, :open, :close) {}
command(:assign) {}
- command(:labels) {}
+ command(:label) {}
command(:power) {}
command(:noop_command)
substitution(:substitution) { 'foo' }
@@ -44,7 +46,7 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
it 'extracts command' do
msg, commands = extractor.extract_commands(original_msg)
- expect(commands).to eq [['labels', '~foo ~"bar baz" label']]
+ expect(commands).to match_array [['label', '~foo ~"bar baz" label']]
expect(msg).to eq final_msg
end
end
@@ -137,42 +139,42 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
describe 'command with multiple arguments' do
context 'at the start of content' do
it_behaves_like 'command with multiple arguments' do
- let(:original_msg) { %(/labels ~foo ~"bar baz" label\nworld) }
+ let(:original_msg) { %(/label ~foo ~"bar baz" label\nworld) }
let(:final_msg) { "world" }
end
end
context 'in the middle of content' do
it_behaves_like 'command with multiple arguments' do
- let(:original_msg) { %(hello\n/labels ~foo ~"bar baz" label\nworld) }
+ let(:original_msg) { %(hello\n/label ~foo ~"bar baz" label\nworld) }
let(:final_msg) { "hello\nworld" }
end
end
context 'in the middle of a line' do
it 'does not extract command' do
- msg = %(hello\nworld /labels ~foo ~"bar baz" label)
+ msg = %(hello\nworld /label ~foo ~"bar baz" label)
msg, commands = extractor.extract_commands(msg)
expect(commands).to be_empty
- expect(msg).to eq %(hello\nworld /labels ~foo ~"bar baz" label)
+ expect(msg).to eq %(hello\nworld /label ~foo ~"bar baz" label)
end
end
context 'at the end of content' do
it_behaves_like 'command with multiple arguments' do
- let(:original_msg) { %(hello\n/labels ~foo ~"bar baz" label) }
+ let(:original_msg) { %(hello\n/label ~foo ~"bar baz" label) }
let(:final_msg) { "hello" }
end
end
context 'when argument is not separated with a space' do
it 'does not extract command' do
- msg = %(hello\n/labels~foo ~"bar baz" label\nworld)
+ msg = %(hello\n/label~foo ~"bar baz" label\nworld)
msg, commands = extractor.extract_commands(msg)
expect(commands).to be_empty
- expect(msg).to eq %(hello\n/labels~foo ~"bar baz" label\nworld)
+ expect(msg).to eq %(hello\n/label~foo ~"bar baz" label\nworld)
end
end
end
@@ -291,98 +293,82 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
expect(msg).to eq "hello\nworld"
end
- it 'does not alter original content if no command is found' do
- msg = 'Fixes #123'
- msg, commands = extractor.extract_commands(msg)
-
- expect(commands).to be_empty
- expect(msg).to eq 'Fixes #123'
- end
-
- it 'does not get confused if command comes before an inline code' do
- msg = "/reopen\n`some inline code`\n/labels ~a\n`more inline code`"
- msg, commands = extractor.extract_commands(msg)
-
- expect(commands).to eq([['reopen'], ['labels', '~a']])
- expect(msg).to eq "`some inline code`\n`more inline code`"
- end
+ it 'extracts command when between HTML comment and HTML tags' do
+ msg = <<~MSG.strip
+ <!-- this is a comment -->
- it 'does not get confused if command comes before a blockcode' do
- msg = "/reopen\n```\nsome blockcode\n```\n/labels ~a\n```\nmore blockcode\n```"
- msg, commands = extractor.extract_commands(msg)
+ /label ~bug
- expect(commands).to eq([['reopen'], ['labels', '~a']])
- expect(msg).to eq "```\nsome blockcode\n```\n```\nmore blockcode\n```"
- end
+ <p>
+ </p>
+ MSG
- it 'does not extract commands inside a blockcode' do
- msg = "Hello\r\n```\r\nThis is some text\r\n/close\r\n/assign @user\r\n```\r\n\r\nWorld"
- expected = msg.delete("\r")
msg, commands = extractor.extract_commands(msg)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to match_array [['label', '~bug']]
+ expect(msg).to eq "<!-- this is a comment -->\n\n<p>\n</p>"
end
- it 'does not extract commands inside a blockquote' do
- msg = "Hello\r\n>>>\r\nThis is some text\r\n/close\r\n/assign @user\r\n>>>\r\n\r\nWorld"
- expected = msg.delete("\r")
+ it 'does not alter original content if no command is found' do
+ msg = 'Fixes #123'
msg, commands = extractor.extract_commands(msg)
expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(msg).to eq 'Fixes #123'
end
- it 'does not extract commands inside a HTML tag' do
- msg = "Hello\r\n<div>\r\nThis is some text\r\n/close\r\n/assign @user\r\n</div>\r\n\r\nWorld"
- expected = msg.delete("\r")
+ it 'does not get confused if command comes before an inline code' do
+ msg = "/reopen\n`some inline code`\n/label ~a\n`more inline code`"
msg, commands = extractor.extract_commands(msg)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to match_array([['reopen'], ['label', '~a']])
+ expect(msg).to eq "`some inline code`\n`more inline code`"
end
- it 'does not extract commands in multiline inline code on seperated rows' do
- msg = "Hello\r\n`\r\nThis is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
- expected = msg.delete("\r")
+ it 'does not get confused if command comes before a code block' do
+ msg = "/reopen\n```\nsome blockcode\n```\n/label ~a\n```\nmore blockcode\n```"
msg, commands = extractor.extract_commands(msg)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to match_array([['reopen'], ['label', '~a']])
+ expect(msg).to eq "```\nsome blockcode\n```\n```\nmore blockcode\n```"
end
- it 'does not extract commands in multiline inline code starting from text' do
- msg = "Hello `This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
- expected = msg.delete("\r")
- msg, commands = extractor.extract_commands(msg)
-
- expect(commands).to be_empty
- expect(msg).to eq expected
- end
+ context 'does not extract commands inside' do
+ where(:description, :text) do
+ 'block HTML tags' | "Hello\r\n<div>\r\nText\r\n/close\r\n/assign @user\r\n</div>\r\n\r\nWorld"
+ 'inline html on seperated rows' | "Text\r\n<b>\r\n/close\r\n</b>"
+ 'HTML comments' | "<!--\n/assign @user\n-->"
+ 'blockquotes' | "> Text\r\n/reopen"
+ 'multiline blockquotes' | "Hello\r\n\r\n>>>\r\nText\r\n/close\r\n/assign @user\r\n>>>\r\n\r\nWorld"
+ 'code blocks' | "Hello\r\n```\r\nText\r\n/close\r\n/assign @user\r\n```\r\n\r\nWorld"
+ 'inline code on seperated rows' | "Hello `Text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
+ end
- it 'does not extract commands in inline code' do
- msg = "`This is some text\r\n/close\r\n/assign @user\r\n`\r\n\r\nWorld"
- expected = msg.delete("\r")
- msg, commands = extractor.extract_commands(msg)
+ with_them do
+ specify do
+ expected = text.delete("\r")
+ msg, commands = extractor.extract_commands(text)
- expect(commands).to be_empty
- expect(msg).to eq expected
+ expect(commands).to be_empty
+ expect(msg).to eq expected
+ end
+ end
end
it 'limits to passed commands when they are passed' do
msg = <<~MSG.strip
Hello, we should only extract the commands passed
/reopen
- /labels hello world
+ /label hello world
/power
MSG
expected_msg = <<~EXPECTED.strip
Hello, we should only extract the commands passed
/power
EXPECTED
- expected_commands = [['reopen'], ['labels', 'hello world']]
+ expected_commands = [['reopen'], ['label', 'hello world']]
- msg, commands = extractor.extract_commands(msg, only: [:open, :labels])
+ msg, commands = extractor.extract_commands(msg, only: [:open, :label])
expect(commands).to eq(expected_commands)
expect(msg).to eq expected_msg
@@ -398,14 +384,13 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
end
describe '#redact_commands' do
- using RSpec::Parameterized::TableSyntax
-
where(:text, :expected) do
- "hello\n/labels ~label1 ~label2\nworld" | "hello\n`/labels ~label1 ~label2`\nworld"
- "hello\n/open\n/labels ~label1\nworld" | "hello\n`/open`\n`/labels ~label1`\nworld"
- "hello\n/reopen\nworld" | "hello\n`/reopen`\nworld"
- "/reopen\nworld" | "`/reopen`\nworld"
- "hello\n/open" | "hello\n`/open`"
+ "hello\n/label ~label1 ~label2\nworld" | "hello\n`/label ~label1 ~label2`\nworld"
+ "hello\n/open\n/label ~label1\nworld" | "hello\n`/open`\n`/label ~label1`\nworld"
+ "hello\n/reopen\nworld" | "hello\n`/reopen`\nworld"
+ "/reopen\nworld" | "`/reopen`\nworld"
+ "hello\n/open" | "hello\n`/open`"
+ "<!--\n/assign @user\n-->" | "<!--\n/assign @user\n-->"
end
with_them do
diff --git a/spec/lib/gitlab/redis/buffered_counter_spec.rb b/spec/lib/gitlab/redis/buffered_counter_spec.rb
new file mode 100644
index 00000000000..ef17b90d406
--- /dev/null
+++ b/spec/lib/gitlab/redis/buffered_counter_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::BufferedCounter, feature_category: :redis do
+ include_examples "redis_new_instance_shared_examples", 'buffered_counter', Gitlab::Redis::SharedState
+end
diff --git a/spec/lib/gitlab/redis/db_load_balancing_spec.rb b/spec/lib/gitlab/redis/db_load_balancing_spec.rb
index d3d3ced62a9..c3209ee12e5 100644
--- a/spec/lib/gitlab/redis/db_load_balancing_spec.rb
+++ b/spec/lib/gitlab/redis/db_load_balancing_spec.rb
@@ -5,40 +5,4 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::DbLoadBalancing, feature_category: :scalability do
include_examples "redis_new_instance_shared_examples", 'db_load_balancing', Gitlab::Redis::SharedState
include_examples "redis_shared_examples"
-
- describe '#pool' do
- let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
- let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
-
- subject { described_class.pool }
-
- before do
- allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
-
- # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
- allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(mktmpdir)
- allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
- end
-
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- it 'instantiates an instance of MultiStore' do
- subject.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
-
- expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
- expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
-
- expect(redis_instance.instance_name).to eq('DbLoadBalancing')
- end
- end
-
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_db_load_balancing,
- :use_primary_store_as_default_for_db_load_balancing
- end
end
diff --git a/spec/lib/gitlab/redis/sidekiq_status_spec.rb b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
deleted file mode 100644
index 45578030ca8..00000000000
--- a/spec/lib/gitlab/redis/sidekiq_status_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Redis::SidekiqStatus do
- # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
- # to move away from `Sidekiq.redis` for sidekiq status data. Thus, we use the
- # same store configuration as the former.
- let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
-
- include_examples "redis_shared_examples"
-
- describe '#pool' do
- let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
- let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
- let(:rails_root) { mktmpdir }
-
- subject { described_class.pool }
-
- before do
- # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists
- allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(rails_root)
- allow(Gitlab::Redis::Queues).to receive(:rails_root).and_return(rails_root)
-
- allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
- allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
- end
-
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- it 'instantiates an instance of MultiStore' do
- subject.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
-
- expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
- expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
-
- expect(redis_instance.instance_name).to eq('SidekiqStatus')
- end
- end
-
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sidekiq_status,
- :use_primary_store_as_default_for_sidekiq_status
- end
-
- describe '#store_name' do
- it 'returns the name of the SharedState store' do
- expect(described_class.store_name).to eq('SharedState')
- end
- end
-end
diff --git a/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb
index 4bd4455d1bd..f4f38a861ee 100644
--- a/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb
+++ b/spec/lib/gitlab/seeders/ci/catalog/resource_seeder_spec.rb
@@ -7,10 +7,11 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
let_it_be_with_reload(:group) { create(:group) }
let_it_be(:seed_count) { 2 }
let_it_be(:last_resource_id) { seed_count - 1 }
+ let(:publish) { true }
let(:group_path) { group.path }
- subject(:seeder) { described_class.new(group_path: group_path, seed_count: seed_count) }
+ subject(:seeder) { described_class.new(group_path: group_path, seed_count: seed_count, publish: publish) }
before_all do
group.add_owner(admin)
@@ -28,12 +29,26 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
end
context 'when project name already exists' do
- before do
- create(:project, namespace: group, name: "ci_seed_resource_0")
+ context 'in the same group' do
+ before do
+ create(:project, namespace: group, name: 'ci_seed_resource_0')
+ end
+
+ it 'skips that project creation and keeps seeding' do
+ expect { seed }.to change { Project.count }.by(seed_count - 1)
+ end
end
- it 'skips that project creation and keeps seeding' do
- expect { seed }.to change { Project.count }.by(seed_count - 1)
+ context 'in a different group' do
+ let(:new_group) { create(:group) }
+
+ before do
+ create(:project, namespace: new_group, name: 'ci_seed_resource_0')
+ end
+
+ it 'executes the project creation' do
+ expect { seed }.to change { Project.count }.by(seed_count)
+ end
end
end
@@ -65,6 +80,26 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
end
end
+ describe 'publish argument' do
+ context 'when false' do
+ let(:publish) { false }
+
+ it 'creates catalog resources in draft state' do
+ group.projects.each do |project|
+ expect(project.catalog_resource.state).to be('draft')
+ end
+ end
+ end
+
+ context 'when true' do
+ it 'creates catalog resources in published state' do
+ group.projects.each do |project|
+ expect(project.catalog_resource&.state).to be('published')
+ end
+ end
+ end
+ end
+
it 'skips seeding a project if the project name already exists' do
# We call the same command twice, as it means it would try to recreate
# projects that were already created!
@@ -87,12 +122,11 @@ RSpec.describe ::Gitlab::Seeders::Ci::Catalog::ResourceSeeder, feature_category:
project = group.projects.last
default_branch = project.default_branch_or_main
- expect(project.repository.blob_at(default_branch, "README.md")).not_to be_nil
- expect(project.repository.blob_at(default_branch, "template.yml")).not_to be_nil
+ expect(project.repository.blob_at(default_branch, 'README.md')).not_to be_nil
+ expect(project.repository.blob_at(default_branch, 'templates/component.yml')).not_to be_nil
end
- # This should be run again when fixing: https://gitlab.com/gitlab-org/gitlab/-/issues/429649
- xit 'creates projects with CI catalog resources' do
+ it 'creates projects with CI catalog resources' do
expect { seed }.to change { Project.count }.by(seed_count)
expect(group.projects.all?(&:catalog_resource)).to eq true
diff --git a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
index a15dbccc80c..930782dfadf 100644
--- a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
+++ b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
NULL_LOGGER = Gitlab::JsonLogger.new('/dev/null')
TAG_LIST = Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder::TAG_LIST.to_set
-RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetPipelineSeeder, feature_category: :runner_fleet do
+RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetPipelineSeeder, feature_category: :fleet_visibility do
subject(:seeder) do
described_class.new(NULL_LOGGER, projects_to_runners: projects_to_runners, job_count: job_count)
end
diff --git a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
index 4597cc6b315..01cbce28159 100644
--- a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
+++ b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
NULL_LOGGER = Gitlab::JsonLogger.new('/dev/null')
-RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder, feature_category: :runner_fleet do
+RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder, feature_category: :fleet_visibility do
let_it_be(:user) { create(:user, :admin, username: 'test-admin') }
subject(:seeder) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index dbfab116479..5724c58f1a4 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -106,21 +106,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
end
context 'when TTL option is not set' do
- context 'when reduce_duplicate_job_key_ttl is enabled' do
- let(:expected_ttl) { described_class::SHORT_DUPLICATE_KEY_TTL }
+ let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
- it_behaves_like 'sets Redis keys with correct TTL'
- end
-
- context 'when reduce_duplicate_job_key_ttl is disabled' do
- before do
- stub_feature_flags(reduce_duplicate_job_key_ttl: false)
- end
-
- let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
-
- it_behaves_like 'sets Redis keys with correct TTL'
- end
+ it_behaves_like 'sets Redis keys with correct TTL'
end
context 'when TTL option is set' do
@@ -270,7 +258,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
}
end
- let(:argv) { ['main', 9, 'loc1', 'ci', nil, 'loc2'] }
+ let(:argv) { ['main', 9, 'loc1', 'ci', '', 'loc2'] }
it 'only updates the main connection' do
subject
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb
new file mode 100644
index 00000000000..470c860fb60
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/strategies/click_house_migration_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::Strategies::ClickHouseMigration, feature_category: :database do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestPauseWorker'
+ end
+
+ include ::ApplicationWorker
+ include ::ClickHouseWorker
+
+ def perform(*); end
+ end
+ end
+
+ before do
+ stub_const('TestPauseWorker', worker_class)
+ end
+
+ describe '#call' do
+ include Gitlab::ExclusiveLeaseHelpers
+
+ shared_examples 'a worker being executed' do
+ it 'schedules the job' do
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).not_to receive(:add_to_waiting_queue!)
+
+ worker_class.perform_async('args1')
+
+ expect(worker_class.jobs.count).to eq(1)
+ end
+ end
+
+ context 'when lock is not taken' do
+ it_behaves_like 'a worker being executed'
+ end
+
+ context 'when lock is taken' do
+ include ExclusiveLeaseHelpers
+
+ around do |example|
+ ClickHouse::MigrationSupport::ExclusiveLock.execute_migration do
+ example.run
+ end
+ end
+
+ it 'does not schedule the job' do
+ expect(Gitlab::SidekiqMiddleware::PauseControl::PauseControlService).to receive(:add_to_waiting_queue!).once
+
+ worker_class.perform_async('args1')
+
+ expect(worker_class.jobs.count).to eq(0)
+ end
+
+ context 'when pause_clickhouse_workers_during_migration FF is disabled' do
+ before do
+ stub_feature_flags(pause_clickhouse_workers_during_migration: false)
+ end
+
+ it_behaves_like 'a worker being executed'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb
new file mode 100644
index 00000000000..1aa4b470db0
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control/workers_map_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::PauseControl::WorkersMap, feature_category: :global_search do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestPauseWorker'
+ end
+
+ include ApplicationWorker
+
+ pause_control :zoekt
+
+ def perform(*); end
+ end
+ end
+
+ before do
+ stub_const('TestPauseWorker', worker_class)
+ end
+
+ describe '.strategy_for' do
+ it 'accepts classname' do
+ expect(described_class.strategy_for(worker: worker_class)).to eq(:zoekt)
+ end
+
+ it 'accepts worker instance' do
+ expect(described_class.strategy_for(worker: worker_class.new)).to eq(:zoekt)
+ end
+
+ it 'returns nil for unknown worker' do
+ expect(described_class.strategy_for(worker: described_class)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb b/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
index a0cce0f61a0..2cb98b43051 100644
--- a/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/pause_control_spec.rb
@@ -1,19 +1,23 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'rspec-parameterized'
RSpec.describe Gitlab::SidekiqMiddleware::PauseControl, feature_category: :global_search do
describe '.for' do
- it 'returns the right class for `zoekt`' do
- expect(described_class.for(:zoekt)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt)
- end
+ using RSpec::Parameterized::TableSyntax
- it 'returns the right class for `none`' do
- expect(described_class.for(:none)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
+ where(:strategy_name, :expected_class) do
+ :none | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None
+ :unknown | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None
+ :click_house_migration | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::ClickHouseMigration
+ :zoekt | ::Gitlab::SidekiqMiddleware::PauseControl::Strategies::Zoekt
end
- it 'returns nil when passing an unknown key' do
- expect(described_class.for(:unknown)).to eq(::Gitlab::SidekiqMiddleware::PauseControl::Strategies::None)
+ with_them do
+ it 'returns the right class' do
+ expect(described_class.for(strategy_name)).to eq(expected_class)
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 5a38d1b7750..a5c6df5e9d5 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'sidekiq/testing'
-RSpec.describe Gitlab::SidekiqMiddleware do
+RSpec.describe Gitlab::SidekiqMiddleware, feature_category: :shared do
let(:job_args) { [0.01] }
let(:disabled_sidekiq_middlewares) { [] }
let(:chain) { Sidekiq::Middleware::Chain.new(Sidekiq) }
@@ -33,6 +33,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
configurator.call(chain)
stub_feature_flags("drop_sidekiq_jobs_#{worker_class.name}": false) # not dropping the job
end
+
it "passes through the right middlewares", :aggregate_failures do
enabled_sidekiq_middlewares.each do |middleware|
expect_next_instances_of(middleware, 1, true) do |middleware_instance|
@@ -68,6 +69,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
::Gitlab::SidekiqVersioning::Middleware,
::Gitlab::SidekiqStatus::ServerMiddleware,
::Gitlab::SidekiqMiddleware::WorkerContext::Server,
+ ::ClickHouse::MigrationSupport::SidekiqMiddleware,
::Gitlab::SidekiqMiddleware::DuplicateJobs::Server,
::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware,
::Gitlab::SidekiqMiddleware::SkipJobs
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index a555e6a828a..55e3885d257 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues,
+ :clean_gitlab_redis_shared_state do
shared_examples 'tracking status in redis' do
describe '.set' do
it 'stores the job ID' do
@@ -53,6 +54,31 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
end
end
+ describe '.expire' do
+ it 'refreshes the expiration time if key is present' do
+ described_class.set('123', 1.minute)
+ described_class.expire('123', 1.hour)
+
+ key = described_class.key_for('123')
+
+ with_redis do |redis|
+ expect(redis.exists?(key)).to eq(true)
+ expect(redis.ttl(key) > 5.minutes).to eq(true)
+ end
+ end
+
+ it 'does nothing if key is not present' do
+ described_class.expire('123', 1.minute)
+
+ key = described_class.key_for('123')
+
+ with_redis do |redis|
+ expect(redis.exists?(key)).to eq(false)
+ expect(redis.ttl(key)).to eq(-2)
+ end
+ end
+ end
+
describe '.all_completed?' do
it 'returns true if all jobs have been completed' do
expect(described_class.all_completed?(%w[123])).to eq(true)
@@ -133,11 +159,11 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
context 'with multi-store feature flags turned on' do
def with_redis(&block)
- Gitlab::Redis::SidekiqStatus.with(&block)
+ Gitlab::Redis::SharedState.with(&block)
end
- it 'uses Gitlab::Redis::SidekiqStatus.with' do
- expect(Gitlab::Redis::SidekiqStatus).to receive(:with).and_call_original
+ it 'uses Gitlab::Redis::SharedState.with' do
+ expect(Gitlab::Redis::SharedState).to receive(:with).and_call_original
expect(Sidekiq).not_to receive(:redis)
described_class.job_status(%w[123 456 789])
@@ -158,7 +184,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
it 'uses Sidekiq.redis' do
expect(Sidekiq).to receive(:redis).and_call_original
- expect(Gitlab::Redis::SidekiqStatus).not_to receive(:with)
+ expect(Gitlab::Redis::SharedState).not_to receive(:with)
described_class.job_status(%w[123 456 789])
end
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
index ea3c030541f..19b578a4d6d 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
+RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro, feature_category: :application_instrumentation do
include StubENV
let(:snowplow_micro_settings) do
@@ -18,6 +18,8 @@ RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
allow(Rails.env).to receive(:development?).and_return(true)
end
+ it { is_expected.to delegate_method(:flush).to(:tracker) }
+
describe '#hostname' do
context 'when snowplow_micro config is set' do
let(:address) { '127.0.0.1:9091' }
diff --git a/spec/lib/gitlab/tracking/event_definition_spec.rb b/spec/lib/gitlab/tracking/event_definition_spec.rb
index ab0660147e4..5e41c691da8 100644
--- a/spec/lib/gitlab/tracking/event_definition_spec.rb
+++ b/spec/lib/gitlab/tracking/event_definition_spec.rb
@@ -35,6 +35,33 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
expect { described_class.definitions }.not_to raise_error
end
+ it 'has no duplicated actions in InternalEventTracking events', :aggregate_failures do
+ definitions_by_action = described_class.definitions
+ .select { |d| d.category == 'InternalEventTracking' }
+ .group_by(&:action)
+
+ definitions_by_action.each do |action, definitions|
+ expect(definitions.size).to eq(1),
+ "Multiple definitions use the action '#{action}': #{definitions.map(&:path).join(', ')}"
+ end
+ end
+
+ it 'has event definitions for all events used in Internal Events metric definitions', :aggregate_failures do
+ from_metric_definitions = Gitlab::Usage::MetricDefinition.definitions
+ .values
+ .select { |m| m.attributes[:data_source] == 'internal_events' }
+ .flat_map { |m| m.events&.keys }
+ .compact
+ .uniq
+
+ event_names = Gitlab::Tracking::EventDefinition.definitions.map { |e| e.attributes[:action] }
+
+ from_metric_definitions.each do |event|
+ expect(event_names).to include(event),
+ "Event '#{event}' is used in Internal Events but does not have an event definition yet. Please define it."
+ end
+ end
+
describe '#validate' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index f3e27c72143..46213532071 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation do
include StubENV
+ using RSpec::Parameterized::TableSyntax
before do
stub_application_setting(snowplow_enabled: true)
@@ -17,6 +18,8 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
described_class.instance_variable_set(:@tracker, nil)
end
+ it { is_expected.to delegate_method(:flush).to(:tracker) }
+
describe '.options' do
shared_examples 'delegates to destination' do |klass|
before do
@@ -295,29 +298,57 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
end
describe 'snowplow_micro_enabled?' do
- before do
- allow(Rails.env).to receive(:development?).and_return(true)
+ where(:development?, :micro_verification_enabled?, :snowplow_micro_enabled, :result) do
+ true | true | true | true
+ true | true | false | false
+ false | true | true | true
+ false | true | false | false
+ false | false | true | false
+ false | false | false | false
+ true | false | true | true
+ true | false | false | false
end
- it 'returns true when snowplow_micro is enabled' do
- stub_config(snowplow_micro: { enabled: true })
-
- expect(described_class).to be_snowplow_micro_enabled
- end
+ with_them do
+ before do
+ allow(Rails.env).to receive(:development?).and_return(development?)
+ allow(described_class).to receive(:micro_verification_enabled?).and_return(micro_verification_enabled?)
+ stub_config(snowplow_micro: { enabled: snowplow_micro_enabled })
+ end
- it 'returns false when snowplow_micro is disabled' do
- stub_config(snowplow_micro: { enabled: false })
+ subject { described_class.snowplow_micro_enabled? }
- expect(described_class).not_to be_snowplow_micro_enabled
+ it { is_expected.to be(result) }
end
it 'returns false when snowplow_micro is not configured' do
+ allow(Rails.env).to receive(:development?).and_return(true)
allow(Gitlab.config).to receive(:snowplow_micro).and_raise(GitlabSettings::MissingSetting)
expect(described_class).not_to be_snowplow_micro_enabled
end
end
+ describe '.micro_verification_enabled?' do
+ where(:verify_tracking, :result) do
+ nil | false
+ 'true' | true
+ 'false' | false
+ '0' | false
+ '1' | true
+ end
+
+ with_them do
+ before do
+ stub_env('VERIFY_TRACKING', verify_tracking)
+ end
+
+ subject { described_class.micro_verification_enabled? }
+
+ it { is_expected.to be(result) }
+ end
+ end
+
describe 'tracker' do
it 'returns a SnowPlowMicro instance in development' do
allow(Rails.env).to receive(:development?).and_return(true)
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 0f827921a66..9e98cdc05eb 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -265,6 +265,19 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
end
end
+ context 'when resolving runs into a timeout' do
+ let(:import_url) { 'http://example.com' }
+
+ before do
+ stub_const("#{described_class}::GETADDRINFO_TIMEOUT_SECONDS", 1)
+ allow(Addrinfo).to receive(:getaddrinfo) { sleep 2 }
+ end
+
+ it 'raises an error due to DNS timeout' do
+ expect { subject }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError, "execution expired")
+ end
+ end
+
context 'when the URL hostname is a domain' do
context 'when domain can be resolved' do
let(:import_url) { 'https://example.org' }
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 08adc031631..fb46d48c1bb 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -46,6 +46,34 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
end
end
+ describe '.instrumentation_class' do
+ context 'for non internal events' do
+ let(:attributes) { { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', data_source: 'redis_hll' } }
+
+ it 'returns class from the definition' do
+ expect(definition.instrumentation_class).to eq('RedisHLLMetric')
+ end
+ end
+
+ context 'for internal events' do
+ context 'for total counter' do
+ let(:attributes) { { key_path: 'metric1', data_source: 'internal_events', events: [{ name: 'a' }] } }
+
+ it 'returns TotalCounterMetric' do
+ expect(definition.instrumentation_class).to eq('TotalCountMetric')
+ end
+ end
+
+ context 'for uniq counter' do
+ let(:attributes) { { key_path: 'metric1', data_source: 'internal_events', events: [{ name: 'a', unique: :id }] } }
+
+ it 'returns RedisHLLMetric' do
+ expect(definition.instrumentation_class).to eq('RedisHLLMetric')
+ end
+ end
+ end
+ end
+
describe 'not_removed' do
let(:all_definitions) do
metrics_definitions = [
@@ -71,12 +99,13 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
describe '#with_instrumentation_class' do
let(:all_definitions) do
metrics_definitions = [
- { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'active' },
- { key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'broken' },
- { key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'active' },
- { key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: 'removed' },
- { key_path: 'metric5', status: 'active' },
- { key_path: 'metric_missing_status' }
+ { key_path: 'metric1', status: 'active', data_source: 'redis_hll', instrumentation_class: 'RedisHLLMetric' },
+ { key_path: 'metric2', status: 'active', data_source: 'internal_events' }, # class is defined by data_source
+
+ { key_path: 'metric3', status: 'active', data_source: 'redis_hll' },
+ { key_path: 'metric4', status: 'removed', instrumentation_class: 'RedisHLLMetric', data_source: 'redis_hll' },
+ { key_path: 'metric5', status: 'removed', data_source: 'internal_events' },
+ { key_path: 'metric_missing_status', data_source: 'internal_events' }
]
metrics_definitions.map { |definition| described_class.new(definition[:key_path], definition.symbolize_keys) }
end
@@ -86,15 +115,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
end
it 'includes definitions with instrumentation_class' do
- expect(described_class.with_instrumentation_class.count).to eq(3)
- end
-
- context 'with removed metric' do
- let(:metric_status) { 'removed' }
-
- it 'excludes removed definitions' do
- expect(described_class.with_instrumentation_class.count).to eq(3)
- end
+ expect(described_class.with_instrumentation_class.map(&:key_path)).to match_array(%w[metric1 metric2])
end
end
@@ -224,25 +245,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
where(:instrumentation_class, :options, :events, :is_valid) do
'AnotherClass' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
- nil | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | true
+ 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
'RedisHLLMetric' | { events: ['a'] } | nil | false
- 'RedisHLLMetric' | nil | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'a' }] | false
- 'RedisHLLMetric' | { events: 'a' } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: [2] } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'], a: 'b' } | [{ name: 'a', unique: 'user.id' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id', b: 'c' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ name: 'a' }] | false
- 'RedisHLLMetric' | { events: ['a'] } | [{ unique: 'user.id' }] | false
- 'TotalCountMetric' | { events: ['a'] } | [{ name: 'a' }] | true
- 'TotalCountMetric' | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | false
- 'TotalCountMetric' | { events: ['a'] } | nil | false
- 'TotalCountMetric' | nil | [{ name: 'a' }] | false
- 'TotalCountMetric' | { events: [2] } | [{ name: 'a' }] | false
- 'TotalCountMetric' | { events: ['a'] } | [{}] | false
- 'TotalCountMetric' | 'a' | [{ name: 'a' }] | false
- 'TotalCountMetric' | { events: ['a'], a: 'b' } | [{ name: 'a' }] | false
+ nil | { events: ['a'] } | [{ name: 'a', unique: 'user.id' }] | true
end
with_them do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..90791bf223f
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/bulk_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BulkImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"bulk_imports\".\"user_id\") FROM \"bulk_imports\"" }
+
+ before_all do
+ import = create :bulk_import, created_at: 3.days.ago
+ create :bulk_import, created_at: 35.days.ago
+ create :bulk_import, created_at: 3.days.ago
+ create :bulk_import, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"bulk_imports\".\"user_id\") FROM \"bulk_imports\" " \
+ "WHERE \"bulk_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb
new file mode 100644
index 00000000000..6d10052ff66
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_service_desk_custom_email_enabled_metric_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountServiceDeskCustomEmailEnabledMetric, feature_category: :service_ping do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:credential) { create(:service_desk_custom_email_credential, project: project) }
+ let_it_be(:verification) { create(:service_desk_custom_email_verification, :finished, project: project) }
+ let_it_be(:setting) do
+ create(:service_desk_setting, project: project, custom_email: 'support@example.com', custom_email_enabled: true)
+ end
+
+ let(:expected_value) { 1 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..1f620c2502d
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/csv_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CsvImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"csv_issue_imports\".\"user_id\") FROM \"csv_issue_imports\"" }
+
+ before_all do
+ import = create :issue_csv_import, created_at: 3.days.ago
+ create :issue_csv_import, created_at: 35.days.ago
+ create :issue_csv_import, created_at: 3.days.ago
+ create :issue_csv_import, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"csv_issue_imports\".\"user_id\") FROM \"csv_issue_imports\" " \
+ "WHERE \"csv_issue_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb
new file mode 100644
index 00000000000..e9814f0cb51
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_config_metric_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GitlabConfigMetric, feature_category: :service_ping do
+ describe 'config metric' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:config_value, :expected_value) do
+ false | false
+ true | true
+ end
+
+ with_them do
+ before do
+ stub_config(artifacts: { object_store: { enabled: config_value } })
+ end
+
+ it_behaves_like 'a correct instrumented metric value', {
+ time_frame: 'none',
+ options: {
+ config: {
+ artifacts: {
+ object_store: 'enabled'
+ }
+ }
+ }
+ }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb
new file mode 100644
index 00000000000..26210b9febf
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_settings_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GitlabSettingsMetric, feature_category: :service_ping do
+ describe 'settings metric' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:setting_value, :expected_value) do
+ false | false
+ true | true
+ end
+
+ with_them do
+ before do
+ stub_application_setting(gravatar_enabled: setting_value)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', {
+ time_frame: 'none',
+ options: {
+ setting_method: 'gravatar_enabled'
+ }
+ }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..6b7962fda64
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/group_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GroupImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"group_import_states\".\"user_id\") FROM \"group_import_states\"" }
+
+ before_all do
+ import = create :group_import_state, created_at: 3.days.ago
+ create :group_import_state, created_at: 35.days.ago
+ create :group_import_state, created_at: 3.days.ago
+ create :group_import_state, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"group_import_states\".\"user_id\") FROM \"group_import_states\" " \
+ "WHERE \"group_import_states\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
deleted file mode 100644
index 91ad81c4291..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
+++ /dev/null
@@ -1,55 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailCtaClickedMetric do
- using RSpec::Parameterized::TableSyntax
-
- let(:email_attributes) { { cta_clicked_at: Date.yesterday, track: 'verify', series: 0 } }
- let(:options) { { track: 'verify', series: 0 } }
- let(:expected_value) { 2 }
- let(:expected_query) do
- 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL ' \
- 'AND "in_product_marketing_emails"."series" = 0 ' \
- 'AND "in_product_marketing_emails"."track" = 1'
- end
-
- before do
- create_list :in_product_marketing_email, 2, email_attributes
-
- create :in_product_marketing_email, email_attributes.merge(cta_clicked_at: nil)
- create :in_product_marketing_email, email_attributes.merge(track: 'team')
- create :in_product_marketing_email, email_attributes.merge(series: 1)
- end
-
- it_behaves_like 'a correct instrumented metric value and query', {
- options: { track: 'verify', series: 0 },
- time_frame: 'all'
- }
-
- where(:options_key, :valid_value, :invalid_value) do
- :track | 'admin_verify' | 'invite_team'
- :series | 1 | 5
- end
-
- with_them do
- it "raises an exception if option is not present" do
- expect do
- described_class.new(options: options.except(options_key), time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "raises an exception if option has invalid value" do
- expect do
- options[options_key] = invalid_value
- described_class.new(options: options, time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "doesn't raise exceptions if option has valid value" do
- options[options_key] = valid_value
- described_class.new(options: options, time_frame: 'all')
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
deleted file mode 100644
index 3c51368f396..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailSentMetric do
- using RSpec::Parameterized::TableSyntax
-
- let(:email_attributes) { { track: 'verify', series: 0 } }
- let(:expected_value) { 2 }
- let(:expected_query) do
- 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."series" = 0 ' \
- 'AND "in_product_marketing_emails"."track" = 1'
- end
-
- before do
- create_list :in_product_marketing_email, 2, email_attributes
-
- create :in_product_marketing_email, email_attributes.merge(track: 'team')
- create :in_product_marketing_email, email_attributes.merge(series: 1)
- end
-
- it_behaves_like 'a correct instrumented metric value and query', {
- options: { track: 'verify', series: 0 },
- time_frame: 'all'
- }
-
- where(:options_key, :valid_value, :invalid_value) do
- :track | 'admin_verify' | 'invite_team'
- :series | 1 | 5
- end
-
- with_them do
- it "raises an exception if option is not present" do
- expect do
- described_class.new(options: email_attributes.except(options_key), time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "raises an exception if option has invalid value" do
- expect do
- email_attributes[options_key] = invalid_value
- described_class.new(options: email_attributes, time_frame: 'all')
- end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
- end
-
- it "doesn't raise exceptions if option has valid value" do
- email_attributes[options_key] = valid_value
- described_class.new(options: email_attributes, time_frame: 'all')
- end
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb
new file mode 100644
index 00000000000..86bc4d98372
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_users_metric_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::JiraImportsUsersMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) { "SELECT COUNT(DISTINCT \"jira_imports\".\"user_id\") FROM \"jira_imports\"" }
+
+ before_all do
+ import = create :jira_import_state, created_at: 3.days.ago
+ create :jira_import_state, created_at: 35.days.ago
+ create :jira_import_state, created_at: 3.days.ago
+ create :jira_import_state, created_at: 3.days.ago, user: import.user
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"jira_imports\".\"user_id\") FROM \"jira_imports\" " \
+ "WHERE \"jira_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb
new file mode 100644
index 00000000000..20390e6abd9
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/omniauth_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::OmniauthEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Auth).to receive(:omniauth_enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb
new file mode 100644
index 00000000000..2a0e0a1a591
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/project_imports_creators_metric_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ProjectImportsCreatorsMetric, feature_category: :importers do
+ let(:expected_value) { 3 }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"projects\".\"creator_id\") FROM \"projects\" " \
+ "WHERE \"projects\".\"import_type\" IS NOT NULL"
+ end
+
+ before_all do
+ project = create :project, import_type: :jira, created_at: 3.days.ago
+ create :project, import_type: :jira, created_at: 35.days.ago
+ create :project, import_type: :jira, created_at: 3.days.ago
+ create :project, created_at: 3.days.ago
+ create :project, import_type: :jira, created_at: 3.days.ago, creator: project.creator
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 2 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"projects\".\"creator_id\") FROM \"projects\" WHERE " \
+ "\"projects\".\"import_type\" IS NOT NULL AND \"projects\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb
new file mode 100644
index 00000000000..dbd44cc3309
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::PrometheusEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb
new file mode 100644
index 00000000000..3e6812f3b34
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/prometheus_metrics_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::PrometheusMetricsEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Metrics).to receive(:prometheus_metrics_enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb
new file mode 100644
index 00000000000..12eab4bb422
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/reply_by_email_enabled_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ReplyByEmailEnabledMetric, feature_category: :service_ping do
+ before do
+ allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(expected_value)
+ end
+
+ [true, false].each do |setting|
+ context "when the setting is #{setting}" do
+ let(:expected_value) { setting }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
index f3aa1ba4f88..b357d6ea7e4 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/total_count_metric_spec.rb
@@ -9,32 +9,112 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric, :clea
end
context 'with multiple similar events' do
- let(:expected_value) { 10 }
-
before do
+ last_week = Date.today - 7.days
+ two_weeks_ago = last_week - 1.week
+
+ redis_counter_key = described_class.redis_key('my_event', last_week)
+ 2.times do
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
+ redis_counter_key = described_class.redis_key('my_event', two_weeks_ago)
+ 3.times do
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
10.times do
Gitlab::InternalEvents.track_event('my_event')
end
end
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', events: [{ name: 'my_event' }] }
+ context "with an 'all' time_frame" do
+ let(:expected_value) { 10 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', events: [{ name: 'my_event' }] }
+ end
+
+ context "with a 7d time_frame" do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', events: [{ name: 'my_event' }] }
+ end
+
+ context "with a 28d time_frame" do
+ let(:expected_value) { 5 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', events: [{ name: 'my_event' }] }
+ end
end
context 'with multiple different events' do
let(:expected_value) { 2 }
before do
+ last_week = Date.today - 7.days
+ two_weeks_ago = last_week - 1.week
+
+ 2.times do
+ redis_counter_key =
+ Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key('my_event1', last_week)
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
+ 3.times do
+ redis_counter_key =
+ Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key('my_event1', two_weeks_ago)
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
+ 4.times do
+ redis_counter_key =
+ Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key('my_event2', last_week)
+ Gitlab::Redis::SharedState.with { |redis| redis.incr(redis_counter_key) }
+ end
+
Gitlab::InternalEvents.track_event('my_event1')
Gitlab::InternalEvents.track_event('my_event2')
end
- it_behaves_like 'a correct instrumented metric value',
- { time_frame: 'all', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ context "with an 'all' time_frame" do
+ let(:expected_value) { 2 }
+
+ it_behaves_like 'a correct instrumented metric value',
+ { time_frame: 'all', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ end
+
+ context "with a 7d time_frame" do
+ let(:expected_value) { 6 }
+
+ it_behaves_like 'a correct instrumented metric value',
+ { time_frame: '7d', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ end
+
+ context "with a 28d time_frame" do
+ let(:expected_value) { 9 }
+
+ it_behaves_like 'a correct instrumented metric value',
+ { time_frame: '28d', events: [{ name: 'my_event1' }, { name: 'my_event2' }] }
+ end
+ end
+
+ context "with an invalid time_frame" do
+ let(:metric) { described_class.new(time_frame: '14d', events: [{ name: 'my_event' }]) }
+
+ it 'raises an exception' do
+ expect { metric.value }.to raise_error(/Unknown time frame/)
+ end
end
describe '.redis_key' do
it 'adds the key prefix to the event name' do
expect(described_class.redis_key('my_event')).to eq('{event_counters}_my_event')
end
+
+ context "with a date" do
+ it 'adds the key prefix and suffix to the event name' do
+ expect(described_class.redis_key('my_event', Date.new(2023, 10, 19))).to eq("{event_counters}_my_event-2023-42")
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb
new file mode 100644
index 00000000000..4fdabb86e23
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/unique_users_all_imports_metric_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UniqueUsersAllImportsMetric, feature_category: :importers do
+ let(:expected_value) { 6 }
+ let(:expected_query) do
+ <<~SQL.squish
+ SELECT
+ (SELECT COUNT(DISTINCT "projects"."creator_id") FROM "projects" WHERE "projects"."import_type" IS NOT NULL) +
+ (SELECT COUNT(DISTINCT "bulk_imports"."user_id") FROM "bulk_imports") +
+ (SELECT COUNT(DISTINCT "jira_imports"."user_id") FROM "jira_imports") +
+ (SELECT COUNT(DISTINCT "csv_issue_imports"."user_id") FROM "csv_issue_imports") +
+ (SELECT COUNT(DISTINCT "group_import_states"."user_id") FROM "group_import_states")
+ SQL
+ end
+
+ before_all do
+ import = create :jira_import_state, created_at: 3.days.ago
+ create :jira_import_state, created_at: 35.days.ago
+ create :jira_import_state, created_at: 3.days.ago, user: import.user
+
+ create :group_import_state, created_at: 3.days.ago
+ create :issue_csv_import, created_at: 3.days.ago
+ create :bulk_import, created_at: 3.days.ago
+ create :project, import_type: :jira, created_at: 3.days.ago
+ end
+
+ before do
+ described_class::IMPORTS_METRICS.each do |submetric_class|
+ metric = submetric_class.new(time_frame: time_frame, options: options)
+ allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
+ end
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
+ let(:expected_value) { 5 }
+ let(:start) { 30.days.ago.to_fs(:db) }
+ let(:finish) { 2.days.ago.to_fs(:db) }
+ let(:expected_query) do
+ <<~SQL.squish
+ SELECT
+ (SELECT COUNT(DISTINCT "projects"."creator_id") FROM "projects" WHERE "projects"."import_type" IS NOT NULL AND "projects"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "bulk_imports"."user_id") FROM "bulk_imports" WHERE "bulk_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "jira_imports"."user_id") FROM "jira_imports" WHERE "jira_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "csv_issue_imports"."user_id") FROM "csv_issue_imports" WHERE "csv_issue_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
+ (SELECT COUNT(DISTINCT "group_import_states"."user_id") FROM "group_import_states" WHERE "group_import_states"."created_at" BETWEEN '#{start}' AND '#{finish}')
+ SQL
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 71e9e7a8e7d..cbf4d3c8261 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
specify do
aggregate_failures do
- expect(track_action(author: user, project: project)).to be_truthy
- expect(track_action(author: user2, project: project)).to be_truthy
- expect(track_action(author: user3, project: project)).to be_truthy
+ track_action(author: user, project: project)
+ track_action(author: user2, project: project)
+ track_action(author: user3, project: project)
expect(count_unique(date_from: time.beginning_of_week, date_to: 1.week.from_now)).to eq(3)
end
@@ -30,7 +30,9 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
it_behaves_like 'internal event tracking'
it 'does not track edit actions if author is not present' do
- expect(track_action(author: nil, project: project)).to be_nil
+ track_action(author: nil, project: project)
+
+ expect(count_unique(date_from: time.beginning_of_week, date_to: 1.week.from_now)).to eq(0)
end
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 6d30947167c..68af9cd9cfc 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -106,25 +106,4 @@ RSpec.describe Gitlab::UsageDataQueries do
expect(described_class.maximum_id(Project)).to eq(nil)
end
end
-
- describe 'sent_in_product_marketing_email_count' do
- it 'returns sql query that returns correct value' do
- expect(described_class.sent_in_product_marketing_email_count(nil, 0, 0)).to eq(
- 'SELECT COUNT("in_product_marketing_emails"."id") ' \
- 'FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0'
- )
- end
- end
-
- describe 'clicked_in_product_marketing_email_count' do
- it 'returns sql query that returns correct value' do
- expect(described_class.clicked_in_product_marketing_email_count(nil, 0, 0)).to eq(
- 'SELECT COUNT("in_product_marketing_emails"."id") ' \
- 'FROM "in_product_marketing_emails" ' \
- 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0 ' \
- 'AND "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL'
- )
- end
- end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index a1564318408..f43e49bd616 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -241,29 +241,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
)
end
- it 'includes import gmau usage data' do
- for_defined_days_back do
- user = create(:user)
- group = create(:group)
-
- group.add_owner(user)
-
- create(:project, import_type: :github, creator_id: user.id)
- create(:jira_import_state, :finished, project: create(:project, creator_id: user.id))
- create(:issue_csv_import, user: user)
- create(:group_import_state, group: group, user: user)
- create(:bulk_import, user: user)
- end
-
- expect(described_class.usage_activity_by_stage_manage({})).to include(
- unique_users_all_imports: 10
- )
-
- expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include(
- unique_users_all_imports: 5
- )
- end
-
it 'includes imports usage data', :clean_gitlab_redis_cache do
for_defined_days_back do
user = create(:user)
@@ -366,7 +343,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
create(:issue, project: project, author: Users::Internal.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
- create(:jira_integration, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
create(:jira_integration, active: true, project: create(:project, :jira_dvcs_server, creator: user))
end
@@ -377,7 +353,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
service_desk_enabled_projects: 2,
service_desk_issues: 2,
projects_jira_active: 2,
- projects_jira_dvcs_cloud_active: 2,
projects_jira_dvcs_server_active: 2
)
expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(
@@ -387,7 +362,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
service_desk_enabled_projects: 1,
service_desk_issues: 1,
projects_jira_active: 1,
- projects_jira_dvcs_cloud_active: 1,
projects_jira_dvcs_server_active: 1
)
end
@@ -590,67 +564,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
end
context 'when not relying on database records' do
- describe '.features_usage_data_ce' do
- subject { described_class.features_usage_data_ce }
-
- it 'gathers feature usage data', :aggregate_failures do
- expect(subject[:instance_auto_devops_enabled]).to eq(Gitlab::CurrentSettings.auto_devops_enabled?)
- expect(subject[:mattermost_enabled]).to eq(Gitlab.config.mattermost.enabled)
- expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
- expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)
- expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?)
- expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?)
- expect(subject[:reply_by_email_enabled]).to eq(Gitlab::Email::IncomingEmail.enabled?)
- expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
- expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled)
- expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
- expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?)
- expect(subject[:gitpod_enabled]).to eq(Gitlab::CurrentSettings.gitpod_enabled?)
- end
-
- context 'with embedded Prometheus' do
- it 'returns true when embedded Prometheus is enabled' do
- allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
-
- expect(subject[:prometheus_enabled]).to eq(true)
- end
-
- it 'returns false when embedded Prometheus is disabled' do
- allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
-
- expect(subject[:prometheus_enabled]).to eq(false)
- end
- end
-
- context 'with embedded grafana' do
- it 'returns true when embedded grafana is enabled' do
- stub_application_setting(grafana_enabled: true)
-
- expect(subject[:grafana_link_enabled]).to eq(true)
- end
-
- it 'returns false when embedded grafana is disabled' do
- stub_application_setting(grafana_enabled: false)
-
- expect(subject[:grafana_link_enabled]).to eq(false)
- end
- end
-
- context 'with Gitpod' do
- it 'returns true when is enabled' do
- stub_application_setting(gitpod_enabled: true)
-
- expect(subject[:gitpod_enabled]).to eq(true)
- end
-
- it 'returns false when is disabled' do
- stub_application_setting(gitpod_enabled: false)
-
- expect(subject[:gitpod_enabled]).to eq(false)
- end
- end
- end
-
describe '.components_usage_data' do
subject { described_class.components_usage_data }
diff --git a/spec/lib/gitlab/utils/file_info_spec.rb b/spec/lib/gitlab/utils/file_info_spec.rb
index 480036b2fd0..1f52fcb48b6 100644
--- a/spec/lib/gitlab/utils/file_info_spec.rb
+++ b/spec/lib/gitlab/utils/file_info_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Utils::FileInfo, feature_category: :shared do
describe '.linked?' do
it 'raises an error when file does not exist' do
- expect { subject.linked?('foo') }.to raise_error(Errno::ENOENT)
+ expect { subject.linked?("#{tmpdir}/foo") }.to raise_error(Errno::ENOENT)
end
shared_examples 'identifies a linked file' do
@@ -56,7 +56,7 @@ RSpec.describe Gitlab::Utils::FileInfo, feature_category: :shared do
describe '.shares_hard_link?' do
it 'raises an error when file does not exist' do
- expect { subject.shares_hard_link?('foo') }.to raise_error(Errno::ENOENT)
+ expect { subject.shares_hard_link?("#{tmpdir}/foo") }.to raise_error(Errno::ENOENT)
end
shared_examples 'identifies a file that shares a hard link' do
diff --git a/spec/lib/gitlab/web_ide/default_oauth_application_spec.rb b/spec/lib/gitlab/web_ide/default_oauth_application_spec.rb
new file mode 100644
index 00000000000..9bfdc799aec
--- /dev/null
+++ b/spec/lib/gitlab/web_ide/default_oauth_application_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WebIde::DefaultOauthApplication, feature_category: :web_ide do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:oauth_application) { create(:oauth_application, owner: nil) }
+
+ describe '#feature_enabled?' do
+ where(:vscode_web_ide, :web_ide_oauth, :expectation) do
+ [
+ [ref(:current_user), false, false],
+ [false, ref(:current_user), false],
+ [ref(:current_user), ref(:current_user), true]
+ ]
+ end
+
+ with_them do
+ it 'returns the expected value' do
+ stub_feature_flags(vscode_web_ide: vscode_web_ide, web_ide_oauth: web_ide_oauth)
+
+ expect(described_class.feature_enabled?(current_user)).to be(expectation)
+ end
+ end
+ end
+
+ describe '#oauth_application' do
+ it 'returns web_ide_oauth_application from application_settings' do
+ expect(described_class.oauth_application).to be_nil
+
+ stub_application_setting({ web_ide_oauth_application: oauth_application })
+
+ expect(described_class.oauth_application).to be(oauth_application)
+ end
+ end
+
+ describe '#oauth_callback_url' do
+ it 'returns route URL for oauth callback' do
+ expect(described_class.oauth_callback_url).to eq(Gitlab::Routing.url_helpers.ide_oauth_redirect_url)
+ end
+ end
+
+ describe '#ensure_oauth_application!' do
+ it 'if web_ide_oauth_application already exists, does nothing' do
+ expect(application_settings).not_to receive(:lock!)
+ expect(::Doorkeeper::Application).not_to receive(:new)
+
+ stub_application_setting({ web_ide_oauth_application: oauth_application })
+
+ described_class.ensure_oauth_application!
+ end
+
+ it 'if web_ide_oauth_application created while locked, does nothing' do
+ expect(application_settings).to receive(:lock!) do
+ stub_application_setting({ web_ide_oauth_application: oauth_application })
+ end
+ expect(::Doorkeeper::Application).not_to receive(:new)
+ expect(::Gitlab::CurrentSettings).not_to receive(:expire_current_application_settings)
+
+ described_class.ensure_oauth_application!
+ end
+
+ it 'creates web_ide_oauth_application' do
+ expect(application_settings).to receive(:transaction).and_call_original
+ expect(::Doorkeeper::Application).to receive(:new).and_call_original
+ expect(::Gitlab::CurrentSettings).to receive(:expire_current_application_settings).and_call_original
+
+ expect(application_settings.web_ide_oauth_application).to be_nil
+
+ described_class.ensure_oauth_application!
+
+ result = application_settings.web_ide_oauth_application
+ expect(result).not_to be_nil
+ expect(result).to have_attributes(
+ name: 'GitLab Web IDE',
+ redirect_uri: described_class.oauth_callback_url,
+ scopes: ['api'],
+ trusted: true,
+ confidential: false
+ )
+ end
+ end
+
+ def application_settings
+ ::Gitlab::CurrentSettings.current_application_settings
+ end
+end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index d77763f89be..0d5ec5690a9 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -480,6 +480,14 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
describe '.send_url' do
let(:url) { 'http://example.com' }
+ let(:expected_params) do
+ {
+ 'URL' => url,
+ 'AllowRedirects' => false,
+ 'Body' => '',
+ 'Method' => 'GET'
+ }
+ end
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(
@@ -488,12 +496,7 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
- expect(params).to eq({
- 'URL' => url,
- 'AllowRedirects' => false,
- 'Body' => '',
- 'Method' => 'GET'
- }.deep_stringify_keys)
+ expect(params).to eq(expected_params)
end
context 'when body, headers and method are specified' do
@@ -501,6 +504,14 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
let(:headers) { { Authorization: ['Bearer token'] } }
let(:method) { 'POST' }
+ let(:expected_params) do
+ super().merge(
+ 'Body' => body,
+ 'Header' => headers,
+ 'Method' => method
+ ).deep_stringify_keys
+ end
+
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(
described_class.send_url(url, body: body, headers: headers, method: method)
@@ -508,13 +519,33 @@ RSpec.describe Gitlab::Workhorse, feature_category: :shared do
expect(key).to eq("Gitlab-Workhorse-Send-Data")
expect(command).to eq("send-url")
- expect(params).to eq({
- 'URL' => url,
- 'AllowRedirects' => false,
- 'Body' => body,
- 'Header' => headers,
- 'Method' => method
- }.deep_stringify_keys)
+ expect(params).to eq(expected_params)
+ end
+ end
+
+ context 'when timeouts are set' do
+ let(:timeouts) { { open: '5', read: '5' } }
+ let(:expected_params) { super().merge('DialTimeout' => '5s', 'ResponseHeaderTimeout' => '5s') }
+
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(described_class.send_url(url, timeouts: timeouts))
+
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("send-url")
+ expect(params).to eq(expected_params)
+ end
+ end
+
+ context 'when an response statuses are set' do
+ let(:response_statuses) { { error: :service_unavailable, timeout: :bad_request } }
+ let(:expected_params) { super().merge('ErrorResponseStatus' => 503, 'TimeoutResponseStatus' => 400) }
+
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(described_class.send_url(url, response_statuses: response_statuses))
+
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("send-url")
+ expect(params).to eq(expected_params)
end
end
end
diff --git a/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
new file mode 100644
index 00000000000..36fa350e46f
--- /dev/null
+++ b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::GoogleCloudPlatform::ArtifactRegistry::Client, feature_category: :container_registry do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:rsa_key) { OpenSSL::PKey::RSA.generate(3072) }
+ let_it_be(:rsa_key_data) { rsa_key.to_s }
+
+ let(:gcp_project_id) { 'gcp_project_id' }
+ let(:gcp_location) { 'gcp_location' }
+ let(:gcp_repository) { 'gcp_repository' }
+ let(:gcp_wlif) { 'https://wlif.test' }
+
+ let(:user) { project.owner }
+ let(:client) do
+ described_class.new(
+ project: project,
+ user: user,
+ gcp_project_id: gcp_project_id,
+ gcp_location: gcp_location,
+ gcp_repository: gcp_repository,
+ gcp_wlif: gcp_wlif
+ )
+ end
+
+ describe '#list_docker_images' do
+ let(:page_token) { nil }
+
+ subject(:list) { client.list_docker_images(page_token: page_token) }
+
+ before do
+ stub_application_setting(ci_jwt_signing_key: rsa_key_data)
+ end
+
+ it 'calls glgo list docker images API endpoint' do
+ stub_list_docker_image(body: dummy_list_body)
+ expect(client).to receive(:encoded_jwt).with(wlif: gcp_wlif)
+
+ expect(list).to include(images: an_instance_of(Array), next_page_token: an_instance_of(String))
+ end
+
+ context 'with a page token set' do
+ let(:page_token) { 'token' }
+
+ it 'calls glgo list docker images API endpoint with a page token' do
+ stub_list_docker_image(body: dummy_list_body, page_token: page_token)
+
+ expect(list).to include(images: an_instance_of(Array), next_page_token: an_instance_of(String))
+ end
+ end
+
+ context 'with an erroneous response' do
+ it 'returns an empty hash' do
+ stub_list_docker_image(body: dummy_list_body, status_code: 400)
+
+ expect(list).to eq({})
+ end
+ end
+
+ private
+
+ def stub_list_docker_image(body:, page_token: nil, status_code: 200)
+ url = "#{described_class::GLGO_BASE_URL}/gcp/ar"
+ url << "/projects/#{gcp_project_id}"
+ url << "/locations/#{gcp_location}"
+ url << "/repositories/#{gcp_repository}/docker"
+ url << "?page_size=#{described_class::PAGE_SIZE}"
+ url << "&page_token=#{page_token}" if page_token.present?
+
+ stub_request(:get, url)
+ .to_return(status: status_code, body: body)
+ end
+
+ def dummy_list_body
+ <<-BODY
+ {
+ "images": [
+ {
+ "built_at": "2023-11-30T23:23:11.980068941Z",
+ "media_type": "application/vnd.docker.distribution.manifest.v2+json",
+ "name": "projects/project/locations/location/repositories/repo/dockerImages/image@sha256:6a0657acfef760bd9e293361c9b558e98e7d740ed0dffca823d17098a4ffddf5",
+ "size_bytes": 2827903,
+ "tags": [
+ "tag1",
+ "tag2"
+ ],
+ "updated_at": "2023-12-07T11:48:50.840751Z",
+ "uploaded_at": "2023-12-07T11:48:47.598511Z",
+ "uri": "location.pkg.dev/project/repo/image@sha256:6a0657acfef760bd9e293361c9b558e98e7d740ed0dffca823d17098a4ffddf5"
+ }
+ ],
+ "next_page_token": "next_page_token"
+ }
+ BODY
+ end
+ end
+end
diff --git a/spec/lib/integrations/google_cloud_platform/jwt_spec.rb b/spec/lib/integrations/google_cloud_platform/jwt_spec.rb
new file mode 100644
index 00000000000..51707c26a3a
--- /dev/null
+++ b/spec/lib/integrations/google_cloud_platform/jwt_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::GoogleCloudPlatform::Jwt, feature_category: :shared do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:claims) { { audience: 'http://sandbox.test', wlif: 'http://wlif.test' } }
+ let(:jwt) { described_class.new(project: project, user: user, claims: claims) }
+
+ describe '#encoded' do
+ let_it_be(:rsa_key) { OpenSSL::PKey::RSA.generate(3072) }
+ let_it_be(:rsa_key_data) { rsa_key.to_s }
+
+ subject(:encoded) { jwt.encoded }
+
+ before do
+ stub_application_setting(ci_jwt_signing_key: rsa_key_data)
+ end
+
+ it 'creates a valid jwt' do
+ payload, headers = JWT.decode(encoded, rsa_key.public_key, true, { algorithm: 'RS256' })
+
+ expect(payload).to include(
+ 'root_namespace_path' => project.root_namespace.full_path,
+ 'root_namespace_id' => project.root_namespace.id.to_s,
+ 'wlif' => claims[:wlif],
+ 'aud' => claims[:audience],
+ 'project_id' => project.id.to_s,
+ 'project_path' => project.full_path,
+ 'user_id' => user.id.to_s,
+ 'user_email' => user.email,
+ 'sub' => "project_#{project.id}_user_#{user.id}",
+ 'iss' => Gitlab.config.gitlab.url
+ )
+
+ expect(headers).to include(
+ 'kid' => rsa_key.public_key.to_jwk[:kid]
+ )
+ end
+
+ context 'with missing jwt audience' do
+ let(:claims) { { wlif: 'http://wlif.test' } }
+
+ it 'raises an ArgumentError' do
+ expect { encoded }.to raise_error(ArgumentError, described_class::JWT_OPTIONS_ERROR)
+ end
+ end
+
+ context 'with missing jwt wlif' do
+ let(:claims) { { audience: 'http://sandbox.test' } }
+
+ it 'raises an ArgumentError' do
+ expect { encoded }.to raise_error(ArgumentError, described_class::JWT_OPTIONS_ERROR)
+ end
+ end
+
+ context 'with no ci signing key' do
+ before do
+ stub_application_setting(ci_jwt_signing_key: nil)
+ end
+
+ it 'raises a NoSigningKeyError' do
+ expect { encoded }.to raise_error(described_class::NoSigningKeyError)
+ end
+ end
+
+ context 'with oidc_issuer_url feature flag disabled' do
+ before do
+ stub_feature_flags(oidc_issuer_url: false)
+ # Settings.gitlab.base_url and Gitlab.config.gitlab.url are the
+ # same for test. Changing that to assert the proper behavior here.
+ allow(Settings.gitlab).to receive(:base_url).and_return('test.dev')
+ end
+
+ it 'uses a different issuer' do
+ payload, _ = JWT.decode(encoded, rsa_key.public_key, true, { algorithm: 'RS256' })
+
+ expect(payload).to include(
+ 'iss' => Settings.gitlab.base_url
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/organization/current_organization_spec.rb b/spec/lib/organization/current_organization_spec.rb
new file mode 100644
index 00000000000..ffd37ac4de9
--- /dev/null
+++ b/spec/lib/organization/current_organization_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Organization::CurrentOrganization, feature_category: :organization do
+ include described_class
+
+ after do
+ # Wipe thread variables between specs.
+ Thread.current[described_class::CURRENT_ORGANIZATION_THREAD_VAR] = nil
+ end
+
+ describe '.current_organization' do
+ subject { current_organization }
+
+ context 'when current organization is set' do
+ let(:some_organization) { create(:organization) }
+
+ before do
+ self.current_organization = some_organization
+ end
+
+ it { is_expected.to eq some_organization }
+ end
+
+ context 'when organization is not set' do
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '.current_organization=' do
+ subject(:setter) { self.current_organization = some_organization }
+
+ let(:some_organization) { create(:organization) }
+
+ it 'sets current organization' do
+ expect { setter }.to change { current_organization }.from(nil).to(some_organization)
+ end
+ end
+
+ describe '.with_current_organization' do
+ let(:some_organization) { create(:organization) }
+
+ it 'sets current organization within block' do
+ expect(current_organization).to be_nil
+ with_current_organization(some_organization) do
+ expect(current_organization).to eq some_organization
+ end
+ expect(current_organization).to be_nil
+ end
+
+ context 'when an error is raised' do
+ it 'resets current organization' do
+ begin
+ with_current_organization(some_organization) do
+ raise StandardError
+ end
+ rescue StandardError
+ nil
+ end
+
+ expect(current_organization).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/peek/views/click_house_spec.rb b/spec/lib/peek/views/click_house_spec.rb
index 1ff49afd728..f7cecbaac88 100644
--- a/spec/lib/peek/views/click_house_spec.rb
+++ b/spec/lib/peek/views/click_house_spec.rb
@@ -34,13 +34,11 @@ RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_ca
}),
a_hash_including({
sql: 'INSERT INTO events (id) VALUES (1)',
- database: 'database: main',
- statistics: include('written_rows=>"1"')
+ database: 'database: main'
}),
a_hash_including({
sql: 'INSERT INTO events (id) FORMAT CSV',
- database: 'database: main',
- statistics: include('written_rows=>"2"')
+ database: 'database: main'
})
])
end
diff --git a/spec/lib/product_analytics/event_params_spec.rb b/spec/lib/product_analytics/event_params_spec.rb
deleted file mode 100644
index e560fd10dfd..00000000000
--- a/spec/lib/product_analytics/event_params_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ProductAnalytics::EventParams do
- describe '.parse_event_params' do
- subject { described_class.parse_event_params(raw_event) }
-
- let(:raw_event) { Gitlab::Json.parse(fixture_file('product_analytics/event.json')) }
-
- it 'extracts all params from raw event' do
- expected_params = {
- project_id: '1',
- platform: 'web',
- name_tracker: 'sp',
- v_tracker: 'js-2.14.0',
- event_id: 'fbf14096-74ee-47e4-883c-8a0d6cb72e37',
- domain_userid: '79543c31-cfc3-4479-a737-fafb9333c8ba',
- domain_sessionid: '54f6d3f3-f4f9-4fdc-87e0-a2c775234c1b',
- domain_sessionidx: 4,
- page_url: 'http://example.com/products/1',
- page_referrer: 'http://example.com/products/1',
- br_lang: 'en-US',
- br_cookies: true,
- os_timezone: 'America/Los_Angeles',
- doc_charset: 'UTF-8',
- se_category: 'category',
- se_action: 'action',
- se_label: 'label',
- se_property: 'property',
- se_value: 12.34
- }
-
- expect(subject).to include(expected_params)
- end
- end
-
- describe '.has_required_params?' do
- subject { described_class.has_required_params?(params) }
-
- context 'aid and eid are present' do
- let(:params) { { 'aid' => 1, 'eid' => 2 } }
-
- it { expect(subject).to be_truthy }
- end
-
- context 'aid and eid are missing' do
- let(:params) { {} }
-
- it { expect(subject).to be_falsey }
- end
-
- context 'eid is missing' do
- let(:params) { { 'aid' => 1 } }
-
- it { expect(subject).to be_falsey }
- end
- end
-end
diff --git a/spec/lib/sbom/package_url_spec.rb b/spec/lib/sbom/package_url_spec.rb
index 92490b184df..a62332b44ad 100644
--- a/spec/lib/sbom/package_url_spec.rb
+++ b/spec/lib/sbom/package_url_spec.rb
@@ -26,7 +26,7 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
-require_relative '../../support/helpers/next_instance_of'
+require 'gitlab/rspec/next_instance_of'
require_relative '../../support/shared_contexts/lib/sbom/package_url_shared_contexts'
RSpec.describe Sbom::PackageUrl, feature_category: :dependency_management do
diff --git a/spec/lib/sidebars/concerns/container_with_html_options_spec.rb b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
index 588e89a80f7..6adbfce3087 100644
--- a/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
+++ b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
@@ -18,10 +18,4 @@ RSpec.describe Sidebars::Concerns::ContainerWithHtmlOptions, feature_category: :
expect(subject.container_html_options).to eq(aria: { label: 'Foo' })
end
end
-
- describe '#collapsed_container_html_options' do
- it 'includes by default aria-label attribute' do
- expect(subject.collapsed_container_html_options).to eq(aria: { label: 'Foo' })
- end
- end
end
diff --git a/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb b/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb
index 2c4c4c48eae..543f9b26a66 100644
--- a/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb
+++ b/spec/lib/sidebars/explore/menus/catalog_menu_spec.rb
@@ -10,31 +10,19 @@ RSpec.describe Sidebars::Explore::Menus::CatalogMenu, feature_category: :navigat
subject { described_class.new(context) }
- context 'when `global_ci_catalog` is enabled`' do
- it 'renders' do
- expect(subject.render?).to be(true)
- end
-
- it 'renders the correct link' do
- expect(subject.link).to match "explore/catalog"
- end
-
- it 'renders the correct title' do
- expect(subject.title).to eq "CI/CD Catalog"
- end
-
- it 'renders the correct icon' do
- expect(subject.sprite_icon).to eq "catalog-checkmark"
- end
+ it 'renders' do
+ expect(subject.render?).to be(true)
end
- context 'when `global_ci_catalog` FF is disabled' do
- before do
- stub_feature_flags(global_ci_catalog: false)
- end
+ it 'renders the correct link' do
+ expect(subject.link).to match "explore/catalog"
+ end
+
+ it 'renders the correct title' do
+ expect(subject.title).to eq "CI/CD Catalog"
+ end
- it 'does not render' do
- expect(subject.render?).to be(false)
- end
+ it 'renders the correct icon' do
+ expect(subject.sprite_icon).to eq "catalog-checkmark"
end
end
diff --git a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
index 2cce2d28e68..00083fcfbf1 100644
--- a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb
@@ -8,12 +8,6 @@ RSpec.describe Sidebars::Groups::Menus::ScopeMenu, feature_category: :navigation
let(:context) { Sidebars::Groups::Context.new(current_user: user, container: group) }
let(:menu) { described_class.new(context) }
- describe '#extra_nav_link_html_options' do
- subject { menu.extra_nav_link_html_options }
-
- specify { is_expected.to match(hash_including(class: 'context-header has-tooltip', title: context.group.name)) }
- end
-
it_behaves_like 'serializable as super_sidebar_menu_args' do
let(:extra_attrs) do
{
diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb
index cc2809df85f..0ff9bbebdc3 100644
--- a/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb
+++ b/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Sidebars::Groups::SuperSidebarMenus::AnalyzeMenu, feature_categor
expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem])
expect(items.map(&:item_id)).to eq([
:analytics_dashboards,
- :dashboards_analytics,
:cycle_analytics,
:ci_cd_analytics,
:contribution_analytics,
diff --git a/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb b/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb
index 87346176a4c..7f1dab6a8b4 100644
--- a/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb
+++ b/spec/lib/sidebars/organizations/menus/manage_menu_spec.rb
@@ -7,17 +7,15 @@ RSpec.describe Sidebars::Organizations::Menus::ManageMenu, feature_category: :na
let_it_be(:user) { build(:user) }
let_it_be(:context) { Sidebars::Context.new(current_user: user, container: organization) }
- let(:items) { subject.instance_variable_get(:@items) }
-
- subject { described_class.new(context) }
+ subject(:menu) { described_class.new(context) }
it 'has title and sprite_icon' do
- expect(subject.title).to eq(s_("Navigation|Manage"))
- expect(subject.sprite_icon).to eq("users")
+ expect(menu.title).to eq(s_("Navigation|Manage"))
+ expect(menu.sprite_icon).to eq("users")
end
describe 'Menu items' do
- subject { described_class.new(context).renderable_items.find { |e| e.item_id == item_id } }
+ subject(:item) { menu.renderable_items.find { |e| e.item_id == item_id } }
describe 'Groups and projects' do
let(:item_id) { :organization_groups_and_projects }
@@ -28,7 +26,15 @@ RSpec.describe Sidebars::Organizations::Menus::ManageMenu, feature_category: :na
describe 'Users' do
let(:item_id) { :organization_users }
- it { is_expected.not_to be_nil }
+ context 'when current user has permissions' do
+ let_it_be(:organization_user) { create(:organization_user, user: user, organization: organization) } # rubocop: disable RSpec/FactoryBot/AvoidCreate -- does not work with build_stubbed
+
+ it { is_expected.not_to be_nil }
+ end
+
+ context 'when current user does not have permissions' do
+ it { is_expected.to be_nil }
+ end
end
end
end
diff --git a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
index 1aa0ea30d0a..b29427d68dd 100644
--- a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu, feature_category: :sou
end
end
- describe 'Contributor statistics' do
+ describe 'Contributor analytics' do
let_it_be(:item_id) { :contributors }
context 'when analytics is disabled' do
diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
index 108a98e28a4..fb1ec94dfe8 100644
--- a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
@@ -25,10 +25,4 @@ RSpec.describe Sidebars::Projects::Menus::ScopeMenu, feature_category: :navigati
specify { is_expected.to match(hash_including(class: 'shortcuts-project')) }
end
-
- describe '#extra_nav_link_html_options' do
- subject { described_class.new(context).extra_nav_link_html_options }
-
- specify { is_expected.to match(hash_including(class: 'context-header has-tooltip', title: context.project.name)) }
- end
end
diff --git a/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb b/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb
deleted file mode 100644
index e74647894fa..00000000000
--- a/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::ShimoMenu do
- let_it_be_with_reload(:project) { create(:project) }
-
- let(:context) { Sidebars::Projects::Context.new(current_user: project.first_owner, container: project) }
-
- subject(:shimo_menu) { described_class.new(context) }
-
- describe '#render?' do
- context 'without a valid Shimo integration' do
- it "doesn't render the menu" do
- expect(shimo_menu.render?).to be_falsey
- end
- end
-
- context 'with a valid Shimo integration' do
- let_it_be_with_reload(:shimo_integration) { create(:shimo_integration, project: project) }
-
- context 'when integration is active' do
- it 'renders the menu' do
- expect(shimo_menu.render?).to eq true
- end
-
- it 'renders menu link' do
- expected_url = Rails.application.routes.url_helpers.project_integrations_shimo_path(project)
- expect(shimo_menu.link).to eq expected_url
- end
- end
-
- context 'when integration is inactive' do
- before do
- shimo_integration.update!(active: false)
- end
-
- it "doesn't render the menu" do
- expect(shimo_menu.render?).to eq false
- end
- end
- end
- end
-end
diff --git a/spec/lib/sidebars/projects/panel_spec.rb b/spec/lib/sidebars/projects/panel_spec.rb
index ec1df438cf1..b6ff1acc176 100644
--- a/spec/lib/sidebars/projects/panel_spec.rb
+++ b/spec/lib/sidebars/projects/panel_spec.rb
@@ -30,28 +30,6 @@ RSpec.describe Sidebars::Projects::Panel, feature_category: :navigation do
expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::WikiMenu) }).to be_nil
end
end
-
- context 'shimo only' do
- let_it_be(:shimo) { create(:shimo_integration, active: true) }
-
- let(:project) { shimo.project }
-
- it 'contains Shimo menu item' do
- expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ShimoMenu) }).not_to be_nil
- end
- end
-
- context 'confluence & shimo' do
- let_it_be(:confluence) { create(:confluence_integration, active: true) }
- let_it_be(:shimo) { create(:shimo_integration, active: true) }
-
- let(:project) { confluence.project }
-
- it 'contains Confluence menu item, not Shimo' do
- expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ConfluenceMenu) }).not_to be_nil
- expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ShimoMenu) }).to be_nil
- end
- end
end
context 'when integration is not present' do
diff --git a/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb
index fa33e7bedfb..eebd089ad3f 100644
--- a/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/access_tokens_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::AccessTokensMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/personal_access_tokens',
+ link: '/-/user_settings/personal_access_tokens',
title: _('Access Tokens'),
icon: 'token',
active_routes: { controller: :personal_access_tokens }
diff --git a/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb
index be5f826ee58..d4b9c359a98 100644
--- a/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/active_sessions_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::ActiveSessionsMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/active_sessions',
+ link: '/-/user_settings/active_sessions',
title: _('Active Sessions'),
icon: 'monitor-lines',
active_routes: { controller: :active_sessions }
diff --git a/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb
index eeda4fb844c..a0c175051df 100644
--- a/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/applications_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::ApplicationsMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/applications',
+ link: '/-/user_settings/applications',
title: _('Applications'),
icon: 'applications',
active_routes: { controller: 'oauth/applications' }
diff --git a/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb
index 33be5050c37..5a154d7dafb 100644
--- a/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/authentication_log_menu_spec.rb
@@ -4,10 +4,10 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::AuthenticationLogMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/audit_log',
+ link: '/-/user_settings/authentication_log',
title: _('Authentication Log'),
icon: 'log',
- active_routes: { path: 'profiles#audit_log' }
+ active_routes: { path: 'user_settings#authentication_log' }
it_behaves_like 'User settings menu #render? method'
end
diff --git a/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb
index 168019fea5d..83e47fd120a 100644
--- a/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb
+++ b/spec/lib/sidebars/user_settings/menus/password_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::UserSettings::Menus::PasswordMenu, feature_category: :navigation do
it_behaves_like 'User settings menu',
- link: '/-/profile/password',
+ link: '/-/user_settings/password',
title: _('Password'),
icon: 'lock',
active_routes: { controller: :passwords }
diff --git a/spec/lib/system_check/base_check_spec.rb b/spec/lib/system_check/base_check_spec.rb
index 168bda07791..2478e6e84ea 100644
--- a/spec/lib/system_check/base_check_spec.rb
+++ b/spec/lib/system_check/base_check_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe SystemCheck::BaseCheck do
it 'responds to Gitlab::TaskHelpers methods' do
expect(subject).to respond_to :ask_to_continue, :os_name, :prompt, :run_and_match, :run_command,
:run_command!, :uid_for, :gid_for, :gitlab_user, :gitlab_user?, :warn_user_is_not_gitlab,
- :repository_storage_paths_args, :user_home, :checkout_or_clone_version, :clone_repo, :checkout_version
+ :user_home, :checkout_or_clone_version, :clone_repo, :checkout_version
end
end
end
diff --git a/spec/lib/system_check/orphans/namespace_check_spec.rb b/spec/lib/system_check/orphans/namespace_check_spec.rb
deleted file mode 100644
index 3964068b20c..00000000000
--- a/spec/lib/system_check/orphans/namespace_check_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe SystemCheck::Orphans::NamespaceCheck, :silence_stdout do
- let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 'broken' } }
-
- before do
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces)
- end
-
- describe '#multi_check' do
- context 'all orphans' do
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 repos/@hashed] }
-
- it 'prints list of all orphaned namespaces except @hashed' do
- expect_list_of_orphans(%w[orphan1 orphan2])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/@hashed] }
-
- it 'prints list of orphaned namespaces' do
- expect_list_of_orphans(%w[orphan1 orphan2])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace and parents with same name as orphans' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let!(:second_level) { create(:group, path: 'second-level', parent: first_level) }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/second-level /repos/@hashed] }
-
- it 'prints list of orphaned namespaces ignoring parents with same namespace as orphans' do
- expect_list_of_orphans(%w[orphan1 orphan2 second-level])
-
- subject.multi_check
- end
- end
-
- context 'no orphans' do
- let(:disk_namespaces) { %w[@hashed] }
-
- it 'prints an empty list ignoring @hashed' do
- expect_list_of_orphans([])
-
- subject.multi_check
- end
- end
- end
-
- def expect_list_of_orphans(orphans)
- expect(subject).to receive(:print_orphans).with(orphans, 'default')
- end
-end
diff --git a/spec/lib/system_check/orphans/repository_check_spec.rb b/spec/lib/system_check/orphans/repository_check_spec.rb
deleted file mode 100644
index 0504e133ab9..00000000000
--- a/spec/lib/system_check/orphans/repository_check_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe SystemCheck::Orphans::RepositoryCheck, :silence_stdout do
- let(:storages) { Gitlab.config.repositories.storages.reject { |key, _| key.eql? 'broken' } }
-
- before do
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- allow(subject).to receive(:fetch_disk_namespaces).and_return(disk_namespaces)
- allow(subject).to receive(:fetch_disk_repositories).and_return(disk_repositories)
- end
-
- describe '#multi_check' do
- context 'all orphans' do
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 repos/@hashed] }
- let(:disk_repositories) { %w[repo1.git repo2.git] }
-
- it 'prints list of all orphaned namespaces except @hashed' do
- expect_list_of_orphans(%w[orphan1/repo1.git orphan1/repo2.git orphan2/repo1.git orphan2/repo2.git])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let!(:project) { create(:project, path: 'repo', namespace: first_level) }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/@hashed] }
- let(:disk_repositories) { %w[repo.git] }
-
- it 'prints list of orphaned namespaces' do
- expect_list_of_orphans(%w[orphan1/repo.git orphan2/repo.git])
-
- subject.multi_check
- end
- end
-
- context 'few orphans with existing namespace and parents with same name as orphans' do
- let!(:first_level) { create(:group, path: 'my-namespace') }
- let!(:second_level) { create(:group, path: 'second-level', parent: first_level) }
- let!(:project) { create(:project, path: 'repo', namespace: first_level) }
- let(:disk_namespaces) { %w[/repos/orphan1 /repos/orphan2 /repos/my-namespace /repos/second-level /repos/@hashed] }
- let(:disk_repositories) { %w[repo.git] }
-
- it 'prints list of orphaned namespaces ignoring parents with same namespace as orphans' do
- expect_list_of_orphans(%w[orphan1/repo.git orphan2/repo.git second-level/repo.git])
-
- subject.multi_check
- end
- end
-
- context 'no orphans' do
- let(:disk_namespaces) { %w[@hashed] }
- let(:disk_repositories) { %w[repo.git] }
-
- it 'prints an empty list ignoring @hashed' do
- expect_list_of_orphans([])
-
- subject.multi_check
- end
- end
- end
-
- def expect_list_of_orphans(orphans)
- expect(subject).to receive(:print_orphans).with(orphans, 'default')
- end
-end
diff --git a/spec/lib/uploaded_file_spec.rb b/spec/lib/uploaded_file_spec.rb
index 721b3d70feb..3a77b12be82 100644
--- a/spec/lib/uploaded_file_spec.rb
+++ b/spec/lib/uploaded_file_spec.rb
@@ -294,4 +294,8 @@ RSpec.describe UploadedFile, feature_category: :package_registry do
it { expect(described_class.new(temp_file.path).sanitize_filename('..')).to eq('_..') }
it { expect(described_class.new(temp_file.path).sanitize_filename('')).to eq('unnamed') }
end
+
+ describe '#empty_size?' do
+ it { expect(described_class.new(temp_file.path).empty_size?).to eq(true) }
+ end
end
diff --git a/spec/lib/vite_gdk_spec.rb b/spec/lib/vite_gdk_spec.rb
new file mode 100644
index 00000000000..f54ede9d877
--- /dev/null
+++ b/spec/lib/vite_gdk_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+VITE_GDK_CONFIG_FILEPATH = "config/vite.gdk.json"
+
+RSpec.describe ViteGdk, feature_category: :tooling do
+ before do
+ allow(ViteRuby).to receive(:configure)
+ allow(ViteRuby.env).to receive(:[]=)
+ allow(YAML).to receive(:safe_load_file)
+ end
+
+ describe '#load_gdk_vite_config' do
+ context 'when not in production environment' do
+ before do
+ stub_env('RAILS_ENV', nil)
+ end
+
+ context 'when it loads file successfully' do
+ it 'configures ViteRuby' do
+ expect(File).to receive(:exist?) do |file_path|
+ expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
+ end.and_return(true)
+ expect(YAML).to receive(:safe_load_file) do |file_path|
+ expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
+ end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test')
+ expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
+ expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
+
+ described_class.load_gdk_vite_config
+ end
+ end
+
+ context 'when config file is missing' do
+ it 'does nothing' do
+ expect(File).to receive(:exist?) do |file_path|
+ expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
+ end.and_return(false)
+ expect(ViteRuby).not_to receive(:configure)
+ expect(ViteRuby.env).not_to receive(:[]=).with('VITE_ENABLED', 'false')
+ expect(ViteRuby.env).not_to receive(:[]=).with('VITE_ENABLED', 'true')
+
+ described_class.load_gdk_vite_config
+ end
+ end
+ end
+
+ context 'when in production environment' do
+ before do
+ stub_env('RAILS_ENV', 'production')
+ end
+
+ it 'does not load and configure ViteRuby' do
+ expect(YAML).not_to receive(:safe_load_file)
+ expect(ViteRuby).not_to receive(:configure)
+ expect(ViteRuby.env).not_to receive(:[]=).with('VITE_ENABLED')
+
+ described_class.load_gdk_vite_config
+ end
+ end
+ end
+end