From 5afcbe03ead9ada87621888a31a62652b10a7e4f Mon Sep 17 00:00:00 2001
From: GitLab Bot
Date: Wed, 20 Sep 2023 11:18:08 +0000
Subject: Add latest changes from gitlab-org/gitlab@16-4-stable-ee

---
 spec/lib/api/ci/helpers/runner_spec.rb | 34 +-
 spec/lib/api/entities/merge_request_basic_spec.rb | 2 +-
 spec/lib/api/entities/merge_request_diff_spec.rb | 44 ++
 spec/lib/api/entities/ml/mlflow/get_run_spec.rb | 63 ++
 spec/lib/api/entities/ml/mlflow/run_info_spec.rb | 2 +-
 spec/lib/api/entities/ml/mlflow/run_spec.rb | 20 +-
 .../lib/api/entities/ml/mlflow/search_runs_spec.rb | 37 ++
 spec/lib/api/entities/project_spec.rb | 2 +-
 spec/lib/api/helpers/packages_helpers_spec.rb | 4 +-
 spec/lib/api/helpers_spec.rb | 222 ++++++-
 spec/lib/api/ml/mlflow/api_helpers_spec.rb | 24 +
 spec/lib/backup/database_model_spec.rb | 82 +++
 spec/lib/backup/database_spec.rb | 92 +--
 spec/lib/backup/gitaly_backup_spec.rb | 14 +-
 spec/lib/backup/repositories_spec.rb | 46 +-
 .../lib/banzai/filter/code_language_filter_spec.rb | 36 +-
 spec/lib/banzai/filter/inline_diff_filter_spec.rb | 2 +-
 .../bitbucket/representation/pull_request_spec.rb | 51 ++
 .../common/graphql/get_members_query_spec.rb | 24 +-
 .../common/pipelines/entity_finisher_spec.rb | 6 +-
 .../member_attributes_transformer_spec.rb | 53 +-
 .../file_downloads/validations_spec.rb | 2 +-
 .../groups/loaders/group_loader_spec.rb | 27 +
 spec/lib/bulk_imports/network_error_spec.rb | 30 +-
 spec/lib/bulk_imports/pipeline/runner_spec.rb | 2 +-
 .../projects/pipelines/issues_pipeline_spec.rb | 34 ++
 .../projects/pipelines/references_pipeline_spec.rb | 125 +++-
 spec/lib/bulk_imports/users_mapper_spec.rb | 26 +-
 spec/lib/click_house/bind_index_manager_spec.rb | 33 --
 spec/lib/click_house/query_builder_spec.rb | 26 +-
 spec/lib/click_house/record_sync_context_spec.rb | 32 +
 spec/lib/click_house/sync_cursor_spec.rb | 35 ++
 .../constraints/activity_pub_constrainer_spec.rb | 39 ++
 .../my_batched_migration_spec_matcher.txt | 2 +-
 .../analytics/internal_events_generator_spec.rb | 6 +-
 .../partitioning/foreign_keys_generator_spec.rb | 6 +-
 .../snowplow_event_definition_generator_spec.rb | 5 +-
 spec/lib/gitlab/auth/o_auth/provider_spec.rb | 14 +-
 .../gitlab/auth/user_access_denied_reason_spec.rb | 2 +-
 spec/lib/gitlab/auth_spec.rb | 24 +-
 ...as_merge_request_of_vulnerability_reads_spec.rb | 101 ++++
 .../backfill_nuget_normalized_version_spec.rb | 74 +++
 ...tatistics_storage_size_with_recent_size_spec.rb | 165 ++++++
 .../backfill_snippet_repositories_spec.rb | 4 +-
 ...backfill_user_preferences_with_defaults_spec.rb | 66 +++
 .../backfill_users_with_defaults_spec.rb | 68 +++
 ...t_credit_card_validation_data_to_hashes_spec.rb | 81 +++
 .../rebalance_partition_id_spec.rb | 46 --
 ...e_users_set_external_if_service_account_spec.rb | 42 ++
 spec/lib/gitlab/bitbucket_import/importer_spec.rb | 22 +-
 .../importers/pull_request_importer_spec.rb | 166 ++++++
 .../importers/pull_requests_importer_spec.rb | 71 +++
 .../importers/repository_importer_spec.rb | 49 ++
 .../bitbucket_import/parallel_importer_spec.rb | 43 ++
 .../gitlab/bitbucket_import/user_finder_spec.rb | 75 +++
 .../bitbucket_server_import/importer_spec.rb | 653 ---------------------
 .../gitlab/checks/matching_merge_request_spec.rb | 45 +-
 .../lib/gitlab/ci/build/artifacts/metadata_spec.rb | 206 ++++---
 spec/lib/gitlab/ci/build/duration_parser_spec.rb | 6 +-
 .../lib/gitlab/ci/components/instance_path_spec.rb | 251 +++++---
 spec/lib/gitlab/ci/config/entry/bridge_spec.rb | 2 +-
 spec/lib/gitlab/ci/config/entry/default_spec.rb | 2 +-
 .../ci/config/entry/include/rules/rule_spec.rb | 38 +-
 .../gitlab/ci/config/entry/include/rules_spec.rb | 35 +-
 spec/lib/gitlab/ci/config/entry/job_spec.rb | 20 +-
 .../lib/gitlab/ci/config/entry/processable_spec.rb | 33 ++
 spec/lib/gitlab/ci/config/external/context_spec.rb | 6 +-
 .../ci/config/external/file/component_spec.rb | 35 ++
 .../ci/config/external/mapper/verifier_spec.rb | 26 -
 .../gitlab/ci/config/external/processor_spec.rb | 14 +-
 spec/lib/gitlab/ci/config/external/rules_spec.rb | 218 +++----
 .../ci/config/interpolation/interpolator_spec.rb | 3 +-
 .../gitlab/ci/config/yaml/tags/reference_spec.rb | 4 +-
 .../gitlab/ci/config/yaml/tags/resolver_spec.rb | 4 +-
 spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb | 65 +-
 spec/lib/gitlab/ci/parsers/security/common_spec.rb | 6 +-
 spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 120 ++++
 spec/lib/gitlab/ci/reports/sbom/component_spec.rb | 12 +
 spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb | 54 ++
 spec/lib/gitlab/ci/templates/MATLAB_spec.rb | 2 +-
 spec/lib/gitlab/ci/trace/stream_spec.rb | 50 ++
 .../gitlab/ci/variables/builder/pipeline_spec.rb | 7 +-
 spec/lib/gitlab/ci/variables/builder_spec.rb | 23 +-
 spec/lib/gitlab/ci/yaml_processor_spec.rb | 122 ++++
 spec/lib/gitlab/composer/version_index_spec.rb | 115 +++-
 .../content_security_policy/config_loader_spec.rb | 56 ++
 spec/lib/gitlab/current_settings_spec.rb | 34 +-
 spec/lib/gitlab/data_builder/deployment_spec.rb | 9 +
 .../async_indexes/migration_helpers_spec.rb | 8 +
 .../lib/gitlab/database/click_house_client_spec.rb | 191 +++---
 spec/lib/gitlab/database/gitlab_schema_spec.rb | 2 +-
 .../gitlab/database/load_balancing/host_spec.rb | 33 +-
 .../database/load_balancing/load_balancer_spec.rb | 51 +-
 .../load_balancing/rack_middleware_spec.rb | 119 ++--
 .../load_balancing/service_discovery_spec.rb | 127 +++-
 .../sidekiq_server_middleware_spec.rb | 44 +-
 .../database/load_balancing/sticking_spec.rb | 353 +++-------
 .../database/migrations/instrumentation_spec.rb | 2 +
 .../database/no_cross_db_foreign_keys_spec.rb | 7 +-
 .../no_overrides_for_through_associations_spec.rb | 80 +++
 .../partitioning/ci_sliding_list_strategy_spec.rb | 26 +
 .../database/partitioning/monthly_strategy_spec.rb | 30 +-
 .../partitioning/partition_manager_spec.rb | 155 ++++-
 .../partitioning/sliding_list_strategy_spec.rb | 26 +
 spec/lib/gitlab/database/partitioning_spec.rb | 91 ++-
 spec/lib/gitlab/database/reindexing_spec.rb | 20 +
 spec/lib/gitlab/database/tables_truncate_spec.rb | 278 +++++----
 spec/lib/gitlab/database_spec.rb | 53 --
 spec/lib/gitlab/database_warnings_spec.rb | 96 +++
 .../email/handler/create_note_handler_spec.rb | 4 +-
 .../email/handler/service_desk_handler_spec.rb | 8 +-
 .../in_product_marketing/admin_verify_spec.rb | 45 --
 .../message/in_product_marketing/base_spec.rb | 108 ----
 .../message/in_product_marketing/create_spec.rb | 28 -
 .../in_product_marketing/team_short_spec.rb | 47 --
 .../message/in_product_marketing/team_spec.rb | 82 ---
 .../in_product_marketing/trial_short_spec.rb | 45 --
 .../message/in_product_marketing/trial_spec.rb | 48 --
 .../message/in_product_marketing/verify_spec.rb | 54 --
 .../email/message/in_product_marketing_spec.rb | 35 --
 .../gitlab/email/service_desk/custom_email_spec.rb | 37 ++
 spec/lib/gitlab/etag_caching/middleware_spec.rb | 16 +-
 spec/lib/gitlab/etag_caching/router/rails_spec.rb | 14 -
 spec/lib/gitlab/etag_caching/store_spec.rb | 2 +-
 spec/lib/gitlab/event_store/store_spec.rb | 20 +-
 spec/lib/gitlab/experiment/rollout/feature_spec.rb | 2 +-
 spec/lib/gitlab/git/blame_spec.rb | 10 +-
 spec/lib/gitlab/git/diff_spec.rb | 25 +
 spec/lib/gitlab/git/repository_spec.rb | 31 +
 spec/lib/gitlab/git_access_snippet_spec.rb | 2 +-
 .../gitlab/gitaly_client/operation_service_spec.rb | 105 +++-
 spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 110 ++++
 .../gitaly_client/repository_service_spec.rb | 13 +-
 .../gitaly_client/with_feature_flag_actors_spec.rb | 23 +-
 .../github_import/attachments_downloader_spec.rb | 51 ++
 spec/lib/gitlab/github_import/client_spec.rb | 22 +-
 .../importer/note_attachments_importer_spec.rb | 41 +-
 .../pull_requests/merged_by_importer_spec.rb | 4 +
 .../importer/pull_requests/review_importer_spec.rb | 4 +
 .../github_import/markdown/attachment_spec.rb | 24 +-
 .../gitlab/github_import/object_counter_spec.rb | 26 +-
 spec/lib/gitlab/github_import/user_finder_spec.rb | 269 +++++++--
 spec/lib/gitlab/github_import_spec.rb | 4 +-
 spec/lib/gitlab/gl_repository/identifier_spec.rb | 6 +-
 spec/lib/gitlab/gl_repository/repo_type_spec.rb | 24 +-
 spec/lib/gitlab/gl_repository_spec.rb | 11 +-
 spec/lib/gitlab/gon_helper_spec.rb | 88 ++-
 .../graphql/deprecations/deprecation_spec.rb | 2 +-
 spec/lib/gitlab/group_search_results_spec.rb | 13 +-
 spec/lib/gitlab/http_spec.rb | 9 +-
 spec/lib/gitlab/import/errors_spec.rb | 1 +
 spec/lib/gitlab/import_export/all_models.yml | 8 +-
 .../import_export/attributes_permitter_spec.rb | 1 -
 .../base/relation_object_saver_spec.rb | 31 +-
 .../gitlab/import_export/command_line_util_spec.rb | 2 +-
 .../decompressed_archive_size_validator_spec.rb | 14 +-
 .../lib/gitlab/import_export/file_importer_spec.rb | 3 +-
 .../import_export/import_test_coverage_spec.rb | 1 -
 .../import_export/json/ndjson_writer_spec.rb | 11 +-
 .../json/streaming_serializer_spec.rb | 3 +-
 .../import_export/project/export_task_spec.rb | 2 +-
 .../import_export/project/tree_restorer_spec.rb | 2 +-
 spec/lib/gitlab/import_sources_spec.rb | 54 +-
 .../instrumentation/redis_interceptor_spec.rb | 1 +
 spec/lib/gitlab/job_waiter_spec.rb | 40 +-
 spec/lib/gitlab/manifest_import/metadata_spec.rb | 18 -
 spec/lib/gitlab/metrics/dashboard/cache_spec.rb | 88 ---
 .../lib/gitlab/metrics/dashboard/processor_spec.rb | 30 -
 .../dashboard/repo_dashboard_finder_spec.rb | 54 --
 .../metrics/dashboard/stages/url_validator_spec.rb | 101 ----
 spec/lib/gitlab/metrics/dashboard/url_spec.rb | 106 ----
 .../metrics/samplers/database_sampler_spec.rb | 78 +--
 .../middleware/webhook_recursion_detection_spec.rb | 2 +-
 spec/lib/gitlab/observability_spec.rb | 29 +-
 spec/lib/gitlab/other_markup_spec.rb | 41 +-
 spec/lib/gitlab/pages/cache_control_spec.rb | 88 ---
 spec/lib/gitlab/pages/virtual_host_finder_spec.rb | 58 --
 spec/lib/gitlab/pages_spec.rb | 87 ++-
 .../gitlab/pagination/cursor_based_keyset_spec.rb | 102 +++-
 spec/lib/gitlab/patch/redis_cache_store_spec.rb | 66 ++-
 .../lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb | 89 +++
 .../prometheus/additional_metrics_parser_spec.rb | 248 --------
 .../additional_metrics_deployment_query_spec.rb | 23 -
 .../additional_metrics_environment_query_spec.rb | 45 --
 spec/lib/gitlab/rack_attack/request_spec.rb | 33 ++
 spec/lib/gitlab/redis/chat_spec.rb | 2 +-
 spec/lib/gitlab/redis/etag_cache_spec.rb | 56 --
 spec/lib/gitlab/redis/multi_store_spec.rb | 100 ++++
 spec/lib/gitlab/redis/pubsub_spec.rb | 8 +
 spec/lib/gitlab/redis/queues_metadata_spec.rb | 43 ++
 spec/lib/gitlab/redis/workhorse_spec.rb | 44 ++
 spec/lib/gitlab/regex_spec.rb | 25 -
 spec/lib/gitlab/repo_path_spec.rb | 14 +-
 spec/lib/gitlab/search_results_spec.rb | 54 +-
 .../lib/gitlab/security/scan_configuration_spec.rb | 10 +
 spec/lib/gitlab/setup_helper/workhorse_spec.rb | 10 +-
 .../duplicate_jobs/client_spec.rb | 2 +-
 .../duplicate_jobs/duplicate_job_spec.rb | 65 +-
 .../sidekiq_middleware/server_metrics_spec.rb | 6 +-
 spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb | 3 +-
 spec/lib/gitlab/sidekiq_queue_spec.rb | 2 +-
 spec/lib/gitlab/sql/cte_spec.rb | 3 +-
 spec/lib/gitlab/sql/pattern_spec.rb | 46 +-
 spec/lib/gitlab/time_tracking_formatter_spec.rb | 8 +
 .../destinations/database_events_snowplow_spec.rb | 4 +
 .../gitlab/tracking/service_ping_context_spec.rb | 24 +-
 spec/lib/gitlab/tracking/standard_context_spec.rb | 3 +-
 spec/lib/gitlab/url_builder_spec.rb | 3 +
 spec/lib/gitlab/url_sanitizer_spec.rb | 19 +
 spec/lib/gitlab/usage/metric_definition_spec.rb | 60 +-
 ...background_migration_failed_jobs_metric_spec.rb | 16 +-
 .../count_connected_agents_metric_spec.rb | 12 +
 spec/lib/gitlab/usage/metrics/query_spec.rb | 2 +-
 spec/lib/gitlab/usage/time_series_storable_spec.rb | 40 ++
 .../ci_template_unique_counter_spec.rb | 2 +-
 .../issue_activity_unique_counter_spec.rb | 186 ++----
 .../kubernetes_agent_counter_spec.rb | 6 +-
 spec/lib/gitlab/usage_data_queries_spec.rb | 2 +-
 spec/lib/gitlab/usage_data_spec.rb | 8 +-
 spec/lib/gitlab/user_access_snippet_spec.rb | 2 +-
 spec/lib/gitlab/utils/markdown_spec.rb | 35 +-
 spec/lib/gitlab/workhorse_spec.rb | 145 ++++-
 spec/lib/gitlab/x509/certificate_spec.rb | 2 +-
 spec/lib/gitlab/x509/commit_sigstore_spec.rb | 53 ++
 spec/lib/gitlab/x509/commit_spec.rb | 6 +-
 spec/lib/gitlab/x509/signature_sigstore_spec.rb | 453 ++++++++++++++
 spec/lib/gitlab/x509/signature_spec.rb | 2 +-
 spec/lib/gitlab/x509/tag_sigstore_spec.rb | 45 ++
 spec/lib/gitlab/x509/tag_spec.rb | 27 +-
 spec/lib/peek/views/click_house_spec.rb | 13 +-
 spec/lib/sidebars/admin/panel_spec.rb | 8 +-
 spec/lib/sidebars/concerns/has_avatar_spec.rb | 29 +
 spec/lib/sidebars/explore/panel_spec.rb | 17 +
 .../groups/menus/packages_registries_menu_spec.rb | 39 +-
 spec/lib/sidebars/groups/menus/scope_menu_spec.rb | 5 +-
 .../sidebars/groups/super_sidebar_panel_spec.rb | 8 +-
 spec/lib/sidebars/menu_item_spec.rb | 9 +-
 spec/lib/sidebars/menu_spec.rb | 12 +
 .../organizations/menus/scope_menu_spec.rb | 4 +-
 spec/lib/sidebars/organizations/panel_spec.rb | 1 +
 .../organizations/super_sidebar_panel_spec.rb | 7 +-
 spec/lib/sidebars/panel_spec.rb | 18 +-
 .../sidebars/projects/menus/issues_menu_spec.rb | 1 +
 .../sidebars/projects/menus/monitor_menu_spec.rb | 14 -
 .../menus/packages_registries_menu_spec.rb | 25 +-
 .../lib/sidebars/projects/menus/scope_menu_spec.rb | 5 +-
 .../sidebars/projects/super_sidebar_panel_spec.rb | 8 +-
 spec/lib/sidebars/search/panel_spec.rb | 7 +-
 spec/lib/sidebars/static_menu_spec.rb | 4 +
 .../user_profile/menus/overview_menu_spec.rb | 5 +-
 spec/lib/sidebars/user_profile/panel_spec.rb | 7 +-
 spec/lib/sidebars/user_settings/panel_spec.rb | 3 +-
 .../your_work/menus/organizations_menu_spec.rb | 42 ++
 spec/lib/sidebars/your_work/panel_spec.rb | 3 +-
 .../system_check/app/table_truncate_check_spec.rb | 75 +++
 spec/lib/unnested_in_filters/rewriter_spec.rb | 251 ++++----
 spec/lib/users/internal_spec.rb | 97 +++
 257 files changed, 7606 insertions(+), 4312 deletions(-)
 create mode 100644 spec/lib/api/entities/merge_request_diff_spec.rb
 create mode 100644 spec/lib/api/entities/ml/mlflow/get_run_spec.rb
 create mode 100644 spec/lib/api/entities/ml/mlflow/search_runs_spec.rb
 create mode 100644 spec/lib/backup/database_model_spec.rb
 delete mode 100644 spec/lib/click_house/bind_index_manager_spec.rb
 create mode 100644 spec/lib/click_house/record_sync_context_spec.rb
 create mode 100644 spec/lib/click_house/sync_cursor_spec.rb
 create mode 100644 spec/lib/constraints/activity_pub_constrainer_spec.rb
 create mode 100644 spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb
 create mode 100644 spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb
 create mode 100644 spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb
 create mode 100644 spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb
 create mode 100644 spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb
 create mode 100644 spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb
 delete mode 100644 spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb
 create mode 100644 spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb
 create mode 100644 spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb
 create mode 100644 spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb
 create mode 100644 spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb
 create mode 100644 spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb
 create mode 100644 spec/lib/gitlab/bitbucket_import/user_finder_spec.rb
 delete mode 100644 spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
 create mode 100644 spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb
 create mode 100644 spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb
 create mode 100644 spec/lib/gitlab/database_warnings_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
 delete mode 100644 spec/lib/gitlab/email/message/in_product_marketing_spec.rb
 create mode 100644 spec/lib/gitlab/email/service_desk/custom_email_spec.rb
 delete mode 100644 spec/lib/gitlab/metrics/dashboard/cache_spec.rb
 delete mode 100644 spec/lib/gitlab/metrics/dashboard/processor_spec.rb
 delete mode 100644 spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb
 delete mode 100644 spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb
 delete mode 100644 spec/lib/gitlab/metrics/dashboard/url_spec.rb
 delete mode 100644 spec/lib/gitlab/pages/cache_control_spec.rb
 create mode 100644 spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb
 delete mode 100644 spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
 delete mode 100644 spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
 delete mode 100644 spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb
 delete mode 100644 spec/lib/gitlab/redis/etag_cache_spec.rb
 create mode 100644 spec/lib/gitlab/redis/pubsub_spec.rb
 create
mode 100644 spec/lib/gitlab/redis/queues_metadata_spec.rb create mode 100644 spec/lib/gitlab/redis/workhorse_spec.rb create mode 100644 spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb create mode 100644 spec/lib/gitlab/usage/time_series_storable_spec.rb create mode 100644 spec/lib/gitlab/x509/commit_sigstore_spec.rb create mode 100644 spec/lib/gitlab/x509/signature_sigstore_spec.rb create mode 100644 spec/lib/gitlab/x509/tag_sigstore_spec.rb create mode 100644 spec/lib/sidebars/concerns/has_avatar_spec.rb create mode 100644 spec/lib/sidebars/explore/panel_spec.rb create mode 100644 spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb create mode 100644 spec/lib/system_check/app/table_truncate_check_spec.rb create mode 100644 spec/lib/users/internal_spec.rb (limited to 'spec/lib') diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb index 62b79c77b4a..70504a58af3 100644 --- a/spec/lib/api/ci/helpers/runner_spec.rb +++ b/spec/lib/api/ci/helpers/runner_spec.rb @@ -3,10 +3,18 @@ require 'spec_helper' RSpec.describe API::Ci::Helpers::Runner do - let(:helper) { Class.new { include API::Ci::Helpers::Runner }.new } + let(:helper) do + Class.new do + include API::Ci::Helpers::Runner + include Gitlab::RackLoadBalancingHelpers + end.new + end + + let(:env_hash) { {} } + let(:request) { instance_double(Rack::Request, env: env_hash) } before do - allow(helper).to receive(:env).and_return({}) + allow(helper).to receive(:request).and_return(request) end describe '#current_job', feature_category: :continuous_integration do @@ -16,17 +24,22 @@ RSpec.describe API::Ci::Helpers::Runner do allow(helper).to receive(:params).and_return(id: build.id) expect(Ci::Build.sticking) - .to receive(:stick_or_unstick_request) - .with({}, :build, build.id) + .to receive(:find_caught_up_replica) + .with(:build, build.id) helper.current_job + + stick_object = env_hash[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].first + expect(stick_object[0]).to eq(Ci::Build.sticking) + expect(stick_object[1]).to eq(:build) + expect(stick_object[2]).to eq(build.id) end it 'does not handle sticking if no build ID was specified' do allow(helper).to receive(:params).and_return({}) expect(Ci::Build.sticking) - .not_to receive(:stick_or_unstick_request) + .not_to receive(:find_caught_up_replica) helper.current_job end @@ -45,17 +58,22 @@ RSpec.describe API::Ci::Helpers::Runner do allow(helper).to receive(:params).and_return(token: runner.token) expect(Ci::Runner.sticking) - .to receive(:stick_or_unstick_request) - .with({}, :runner, runner.token) + .to receive(:find_caught_up_replica) + .with(:runner, runner.token) helper.current_runner + + stick_object = env_hash[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].first + expect(stick_object[0]).to eq(Ci::Runner.sticking) + expect(stick_object[1]).to eq(:runner) + expect(stick_object[2]).to eq(runner.token) end it 'does not handle sticking if no token was specified' do allow(helper).to receive(:params).and_return({}) expect(Ci::Runner.sticking) - .not_to receive(:stick_or_unstick_request) + .not_to receive(:find_caught_up_replica) helper.current_runner end diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb b/spec/lib/api/entities/merge_request_basic_spec.rb index 89e19f8529e..0cf0a57fa87 100644 --- a/spec/lib/api/entities/merge_request_basic_spec.rb +++ b/spec/lib/api/entities/merge_request_basic_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
::API::Entities::MergeRequestBasic do +RSpec.describe ::API::Entities::MergeRequestBasic, feature_category: :code_review_workflow do let_it_be(:user) { create(:user) } let_it_be(:merge_request) { create(:merge_request) } let_it_be(:labels) { create_list(:label, 3) } diff --git a/spec/lib/api/entities/merge_request_diff_spec.rb b/spec/lib/api/entities/merge_request_diff_spec.rb new file mode 100644 index 00000000000..a6927914316 --- /dev/null +++ b/spec/lib/api/entities/merge_request_diff_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::API::Entities::MergeRequestDiff, feature_category: :code_review_workflow do + let_it_be(:user) { create(:user) } + let_it_be(:merge_request) { create(:merge_request) } + let_it_be(:project) { merge_request.target_project } + let_it_be(:entity) { described_class.new(merge_request.merge_request_diffs.first) } + + before do + merge_request.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') + merge_request.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') + end + + subject(:json) { entity.as_json } + + it "includes expected fields" do + expected_fields = %i[ + id head_commit_sha base_commit_sha start_commit_sha created_at + merge_request_id state real_size patch_id_sha + ] + + is_expected.to include(*expected_fields) + end + + it "returns expected data" do + merge_request_diff = merge_request.merge_request_diffs.first + + expect(entity.as_json).to eq( + { + id: merge_request_diff.id, + head_commit_sha: merge_request_diff.head_commit_sha, + base_commit_sha: merge_request_diff.base_commit_sha, + start_commit_sha: merge_request_diff.start_commit_sha, + created_at: merge_request_diff.created_at, + merge_request_id: merge_request.id, + state: merge_request_diff.state, + real_size: merge_request_diff.real_size, + patch_id_sha: merge_request_diff.patch_id_sha + } + ) + end +end diff --git a/spec/lib/api/entities/ml/mlflow/get_run_spec.rb b/spec/lib/api/entities/ml/mlflow/get_run_spec.rb new file mode 100644 index 00000000000..513ecdeee3c --- /dev/null +++ b/spec/lib/api/entities/ml/mlflow/get_run_spec.rb @@ -0,0 +1,63 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::Ml::Mlflow::GetRun, feature_category: :mlops do + let_it_be(:candidate) { build(:ml_candidates, :with_metrics_and_params) } + + subject { described_class.new(candidate).as_json } + + it 'has run key' do + expect(subject).to have_key(:run) + end + + it 'has the id' do + expect(subject.dig(:run, :info, :run_id)).to eq(candidate.eid.to_s) + end + + it 'presents the metrics' do + expect(subject.dig(:run, :data, :metrics).size).to eq(candidate.metrics.size) + end + + it 'presents metrics correctly' do + presented_metric = subject.dig(:run, :data, :metrics)[0] + metric = candidate.metrics[0] + + expect(presented_metric[:key]).to eq(metric.name) + expect(presented_metric[:value]).to eq(metric.value) + expect(presented_metric[:timestamp]).to eq(metric.tracked_at) + expect(presented_metric[:step]).to eq(metric.step) + end + + it 'presents the params' do + expect(subject.dig(:run, :data, :params).size).to eq(candidate.params.size) + end + + it 'presents params correctly' do + presented_param = subject.dig(:run, :data, :params)[0] + param = candidate.params[0] + + expect(presented_param[:key]).to eq(param.name) + expect(presented_param[:value]).to eq(param.value) + end + + context 'when candidate has no metrics' do + before do + allow(candidate).to 
receive(:metrics).and_return([]) + end + + it 'returns empty data' do + expect(subject.dig(:run, :data, :metrics)).to be_empty + end + end + + context 'when candidate has no params' do + before do + allow(candidate).to receive(:params).and_return([]) + end + + it 'data is empty' do + expect(subject.dig(:run, :data, :params)).to be_empty + end + end +end diff --git a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb index 28fef16a532..1664d9f18d2 100644 --- a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb +++ b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do - let_it_be(:candidate) { create(:ml_candidates) } + let_it_be(:candidate) { build(:ml_candidates) } subject { described_class.new(candidate, packages_url: 'http://example.com').as_json } diff --git a/spec/lib/api/entities/ml/mlflow/run_spec.rb b/spec/lib/api/entities/ml/mlflow/run_spec.rb index a57f70f788b..58148212a7b 100644 --- a/spec/lib/api/entities/ml/mlflow/run_spec.rb +++ b/spec/lib/api/entities/ml/mlflow/run_spec.rb @@ -3,24 +3,20 @@ require 'spec_helper' RSpec.describe API::Entities::Ml::Mlflow::Run do - let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params) } + let_it_be(:candidate) { build(:ml_candidates, :with_metrics_and_params) } subject { described_class.new(candidate).as_json } - it 'has run key' do - expect(subject).to have_key(:run) - end - it 'has the id' do - expect(subject.dig(:run, :info, :run_id)).to eq(candidate.eid.to_s) + expect(subject.dig(:info, :run_id)).to eq(candidate.eid.to_s) end it 'presents the metrics' do - expect(subject.dig(:run, :data, :metrics).size).to eq(candidate.metrics.size) + expect(subject.dig(:data, :metrics).size).to eq(candidate.metrics.size) end it 'presents metrics correctly' do - presented_metric = subject.dig(:run, :data, :metrics)[0] + presented_metric = subject.dig(:data, :metrics)[0] metric = candidate.metrics[0] expect(presented_metric[:key]).to eq(metric.name) @@ -30,11 +26,11 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do end it 'presents the params' do - expect(subject.dig(:run, :data, :params).size).to eq(candidate.params.size) + expect(subject.dig(:data, :params).size).to eq(candidate.params.size) end it 'presents params correctly' do - presented_param = subject.dig(:run, :data, :params)[0] + presented_param = subject.dig(:data, :params)[0] param = candidate.params[0] expect(presented_param[:key]).to eq(param.name) @@ -47,7 +43,7 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do end it 'returns empty data' do - expect(subject.dig(:run, :data, :metrics)).to be_empty + expect(subject.dig(:data, :metrics)).to be_empty end end @@ -57,7 +53,7 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do end it 'data is empty' do - expect(subject.dig(:run, :data, :params)).to be_empty + expect(subject.dig(:data, :params)).to be_empty end end end diff --git a/spec/lib/api/entities/ml/mlflow/search_runs_spec.rb b/spec/lib/api/entities/ml/mlflow/search_runs_spec.rb new file mode 100644 index 00000000000..6ed59d454fa --- /dev/null +++ b/spec/lib/api/entities/ml/mlflow/search_runs_spec.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::Ml::Mlflow::SearchRuns, feature_category: :mlops do + let_it_be(:candidates) { [build_stubbed(:ml_candidates, :with_metrics_and_params), build_stubbed(:ml_candidates)] } + + let(:next_page_token) { 'abcdef' } + + 
subject { described_class.new({ candidates: candidates, next_page_token: next_page_token }).as_json } + + it 'presents the candidates', :aggregate_failures do + expect(subject[:runs].size).to eq(2) + expect(subject.dig(:runs, 0, :info, :run_id)).to eq(candidates[0].eid.to_s) + expect(subject.dig(:runs, 1, :info, :run_id)).to eq(candidates[1].eid.to_s) + end + + it 'presents metrics', :aggregate_failures do + expect(subject.dig(:runs, 0, :data, :metrics).size).to eq(candidates[0].metrics.size) + expect(subject.dig(:runs, 1, :data, :metrics).size).to eq(0) + + presented_metric = subject.dig(:runs, 0, :data, :metrics, 0, :key) + metric = candidates[0].metrics[0].name + + expect(presented_metric).to eq(metric) + end + + it 'presents params', :aggregate_failures do + expect(subject.dig(:runs, 0, :data, :params).size).to eq(candidates[0].params.size) + expect(subject.dig(:runs, 1, :data, :params).size).to eq(0) + + presented_param = subject.dig(:runs, 0, :data, :params, 0, :key) + param = candidates[0].params[0].name + + expect(presented_param).to eq(param) + end +end diff --git a/spec/lib/api/entities/project_spec.rb b/spec/lib/api/entities/project_spec.rb index 5d18b93228f..2c2cabba5e9 100644 --- a/spec/lib/api/entities/project_spec.rb +++ b/spec/lib/api/entities/project_spec.rb @@ -26,7 +26,7 @@ RSpec.describe ::API::Entities::Project do end end - describe '.service_desk_address' do + describe '.service_desk_address', feature_category: :service_desk do before do allow(project).to receive(:service_desk_enabled?).and_return(true) end diff --git a/spec/lib/api/helpers/packages_helpers_spec.rb b/spec/lib/api/helpers/packages_helpers_spec.rb index 6ba4396c396..bb7b9d688ea 100644 --- a/spec/lib/api/helpers/packages_helpers_spec.rb +++ b/spec/lib/api/helpers/packages_helpers_spec.rb @@ -292,7 +292,7 @@ RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registr let(:label) { 'counts.package_events_i_package_push_package_by_deploy_token' } let(:property) { 'i_package_push_package_by_deploy_token' } let(:service_ping_context) do - [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: 'counts.package_events_i_package_push_package_by_deploy_token').to_h] + [Gitlab::Usage::MetricDefinition.context_for('counts.package_events_i_package_push_package_by_deploy_token').to_h] end it 'logs a snowplow event' do @@ -320,7 +320,7 @@ RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registr let(:label) { 'counts.package_events_i_package_pull_package_by_guest' } let(:property) { 'i_package_pull_package_by_guest' } let(:service_ping_context) do - [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: 'counts.package_events_i_package_pull_package_by_guest').to_h] + [Gitlab::Usage::MetricDefinition.context_for('counts.package_events_i_package_pull_package_by_guest').to_h] end it 'logs a snowplow event' do diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb index 667ee72f821..dd62343890e 100644 --- a/spec/lib/api/helpers_spec.rb +++ b/spec/lib/api/helpers_spec.rb @@ -11,7 +11,6 @@ RSpec.describe API::Helpers, feature_category: :shared do include Rack::Test::Methods let(:user) { build(:user, id: 42) } - let(:request) { instance_double(Rack::Request) } let(:helper) do Class.new(Grape::API::Instance) do helpers API::APIGuard::HelperMethods @@ -36,18 +35,23 @@ RSpec.describe API::Helpers, feature_category: :shared do allow_any_instance_of(described_class).to receive(:initial_current_user).and_return(user) 
expect(ApplicationRecord.sticking) - .to receive(:stick_or_unstick_request).with(any_args, :user, 42) + .to receive(:find_caught_up_replica).with(:user, 42) get 'user' expect(Gitlab::Json.parse(last_response.body)).to eq({ 'id' => user.id }) + + stick_object = last_request.env[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].first + expect(stick_object[0]).to eq(User.sticking) + expect(stick_object[1]).to eq(:user) + expect(stick_object[2]).to eq(42) end it 'does not handle sticking if no user could be found' do allow_any_instance_of(described_class).to receive(:initial_current_user).and_return(nil) expect(ApplicationRecord.sticking) - .not_to receive(:stick_or_unstick_request) + .not_to receive(:find_caught_up_replica) get 'user' @@ -243,6 +247,165 @@ RSpec.describe API::Helpers, feature_category: :shared do end end + describe '#find_pipeline' do + let(:pipeline) { create(:ci_pipeline) } + + shared_examples 'pipeline finder' do + context 'when pipeline exists' do + it 'returns requested pipeline' do + expect(helper.find_pipeline(existing_id)).to eq(pipeline) + end + end + + context 'when pipeline does not exists' do + it 'returns nil' do + expect(helper.find_pipeline(non_existing_id)).to be_nil + end + end + + context 'when pipeline id is not provided' do + it 'returns nil' do + expect(helper.find_pipeline(nil)).to be_nil + end + end + end + + context 'when ID is used as an argument' do + let(:existing_id) { pipeline.id } + let(:non_existing_id) { non_existing_record_id } + + it_behaves_like 'pipeline finder' + end + + context 'when string ID is used as an argument' do + let(:existing_id) { pipeline.id.to_s } + let(:non_existing_id) { non_existing_record_id } + + it_behaves_like 'pipeline finder' + end + + context 'when ID is a negative number' do + let(:existing_id) { pipeline.id } + let(:non_existing_id) { -1 } + + it_behaves_like 'pipeline finder' + end + end + + describe '#find_pipeline!' do + let_it_be(:project) { create(:project, :public) } + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be(:user) { create(:user) } + + shared_examples 'private project without access' do + before do + project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private')) + allow(helper).to receive(:authenticate_non_public?).and_return(false) + end + + it 'returns not found' do + expect(helper).to receive(:not_found!) 
+ + helper.find_pipeline!(pipeline.id) + end + end + + context 'when user is authenticated' do + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:initial_current_user).and_return(user) + end + + context 'public project' do + it 'returns requested pipeline' do + expect(helper.find_pipeline!(pipeline.id)).to eq(pipeline) + end + end + + context 'private project' do + it_behaves_like 'private project without access' + + context 'without read pipeline permission' do + before do + allow(helper).to receive(:can?).with(user, :read_pipeline, pipeline).and_return(false) + end + + it_behaves_like 'private project without access' + end + end + + context 'with read pipeline permission' do + before do + allow(helper).to receive(:can?).with(user, :read_pipeline, pipeline).and_return(true) + end + + it 'returns requested pipeline' do + expect(helper.find_pipeline!(pipeline.id)).to eq(pipeline) + end + end + end + + context 'when user is not authenticated' do + before do + allow(helper).to receive(:current_user).and_return(nil) + allow(helper).to receive(:initial_current_user).and_return(nil) + end + + context 'public project' do + it 'returns requested pipeline' do + expect(helper.find_pipeline!(pipeline.id)).to eq(pipeline) + end + end + + context 'private project' do + it_behaves_like 'private project without access' + end + end + + context 'support for IDs and paths as argument' do + let_it_be(:project) { create(:project) } + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + + let(:user) { project.first_owner } + + before do + allow(helper).to receive(:current_user).and_return(user) + allow(helper).to receive(:authorized_project_scope?).and_return(true) + allow(helper).to receive(:job_token_authentication?).and_return(false) + allow(helper).to receive(:authenticate_non_public?).and_return(false) + end + + shared_examples 'pipeline finder' do + context 'when pipeline exists' do + it 'returns requested pipeline' do + expect(helper.find_pipeline!(existing_id)).to eq(pipeline) + end + + it 'returns nil' do + expect(helper).to receive(:render_api_error!).with('404 Pipeline Not Found', 404) + expect(helper.find_pipeline!(non_existing_id)).to be_nil + end + end + end + + context 'when ID is used as an argument' do + context 'when pipeline id is an integer' do + let(:existing_id) { pipeline.id } + let(:non_existing_id) { non_existing_record_id } + + it_behaves_like 'pipeline finder' + end + + context 'when pipeline id is a string' do + let(:existing_id) { pipeline.id.to_s } + let(:non_existing_id) { "non_existing_record_id" } + + it_behaves_like 'pipeline finder' + end + end + end + end + describe '#find_group!' 
do let_it_be(:group) { create(:group, :public) } let_it_be(:user) { create(:user) } @@ -628,10 +791,12 @@ RSpec.describe API::Helpers, feature_category: :shared do end it 'logs an exception for unknown event' do - expect(Gitlab::AppLogger).to receive(:warn).with( - "Internal Event tracking event failed for event: #{unknown_event}, message: Unknown event: #{unknown_event}" - ) - + expect(Gitlab::InternalEvents).to receive(:track_event).and_raise(Gitlab::InternalEvents::UnknownEventError, "Unknown event: #{unknown_event}") + expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + .with( + instance_of(Gitlab::InternalEvents::UnknownEventError), + event_name: unknown_event + ) helper.track_event(unknown_event, user_id: user_id, namespace_id: namespace_id, project_id: project_id) end @@ -1072,4 +1237,47 @@ RSpec.describe API::Helpers, feature_category: :shared do it_behaves_like 'authorized' end end + + describe "attributes_for_keys" do + let(:hash) do + { + existing_key_with_present_value: 'actual value', + existing_key_with_nil_value: nil, + existing_key_with_false_value: false + } + end + + let(:parameters) { ::ActionController::Parameters.new(hash) } + let(:symbol_keys) do + %i[ + existing_key_with_present_value + existing_key_with_nil_value + existing_key_with_false_value + non_existing_key + ] + end + + let(:string_keys) { symbol_keys.map(&:to_s) } + let(:filtered_attrs) do + { + 'existing_key_with_present_value' => 'actual value', + 'existing_key_with_false_value' => false + } + end + + let(:empty_attrs) { {} } + + where(:params, :keys, :attrs_result) do + ref(:hash) | ref(:symbol_keys) | ref(:filtered_attrs) + ref(:hash) | ref(:string_keys) | ref(:empty_attrs) + ref(:parameters) | ref(:symbol_keys) | ref(:filtered_attrs) + ref(:parameters) | ref(:string_keys) | ref(:filtered_attrs) + end + + with_them do + it 'returns the values for given keys' do + expect(helper.attributes_for_keys(keys, params)).to eq(attrs_result) + end + end + end end diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb index 4f6a37c66c4..757a73ed612 100644 --- a/spec/lib/api/ml/mlflow/api_helpers_spec.rb +++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb @@ -37,4 +37,28 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do it { is_expected.to eql("http://localhost/gitlab/root/api/v4/projects/#{user_project.id}/packages/generic") } end end + + describe '#candidates_order_params' do + using RSpec::Parameterized::TableSyntax + + subject { candidates_order_params(params) } + + where(:input, :order_by, :order_by_type, :sort) do + '' | nil | nil | nil + 'created_at' | 'created_at' | 'column' | nil + 'created_at ASC' | 'created_at' | 'column' | 'ASC' + 'metrics.something' | 'something' | 'metric' | nil + 'metrics.something asc' | 'something' | 'metric' | 'asc' + 'metrics.something.blah asc' | 'something' | 'metric' | 'asc' + 'params.something ASC' | nil | nil | 'ASC' + 'metadata.something ASC' | nil | nil | 'ASC' + end + with_them do + let(:params) { { order_by: input } } + + it 'is correct' do + is_expected.to include({ order_by: order_by, order_by_type: order_by_type, sort: sort }) + end + end + end end diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb new file mode 100644 index 00000000000..5758ad2c1aa --- /dev/null +++ b/spec/lib/backup/database_model_spec.rb @@ -0,0 +1,82 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Backup::DatabaseModel, 
:reestablished_active_record_base, feature_category: :backup_restore do + let(:gitlab_database_name) { 'main' } + + describe '#connection' do + subject { described_class.new(gitlab_database_name).connection } + + it 'an instance of a ActiveRecord::Base.connection' do + subject.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases + end + end + + describe '#config' do + let(:application_config) do + { + adapter: 'postgresql', + host: 'some_host', + port: '5432' + } + end + + subject { described_class.new(gitlab_database_name).config } + + before do + allow( + Gitlab::Database.database_base_models_with_gitlab_shared[gitlab_database_name].connection_db_config + ).to receive(:configuration_hash).and_return(application_config) + end + + context 'when no GITLAB_BACKUP_PG* variables are set' do + it 'ActiveRecord backup configuration is expected to equal application configuration' do + expect(subject[:activerecord]).to eq(application_config) + end + + it 'PostgreSQL ENV is expected to equal application configuration' do + expect(subject[:pg_env]).to eq( + { + 'PGHOST' => application_config[:host], + 'PGPORT' => application_config[:port] + } + ) + end + end + + context 'when GITLAB_BACKUP_PG* variables are set' do + using RSpec::Parameterized::TableSyntax + + where(:env_variable, :overridden_value) do + 'GITLAB_BACKUP_PGHOST' | 'test.invalid.' + 'GITLAB_BACKUP_PGUSER' | 'some_user' + 'GITLAB_BACKUP_PGPORT' | '1543' + 'GITLAB_BACKUP_PGPASSWORD' | 'secret' + 'GITLAB_BACKUP_PGSSLMODE' | 'allow' + 'GITLAB_BACKUP_PGSSLKEY' | 'some_key' + 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert' + 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert' + 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl' + 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1' + end + + with_them do + let(:pg_env) { env_variable[/GITLAB_BACKUP_(\w+)/, 1] } + let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] } + + before do + stub_env(env_variable, overridden_value) + end + + it 'ActiveRecord backup configuration overrides application configuration' do + expect(subject[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value)) + end + + it 'PostgreSQL ENV overrides application configuration' do + expect(subject[:pg_env]).to include({ pg_env => overridden_value }) + end + end + end + end +end diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb index 61e6c59a1a5..2f14b403576 100644 --- a/spec/lib/backup/database_spec.rb +++ b/spec/lib/backup/database_spec.rb @@ -2,13 +2,7 @@ require 'spec_helper' -RSpec.configure do |rspec| - rspec.expect_with :rspec do |c| - c.max_formatted_output_length = nil - end -end - -RSpec.describe Backup::Database, feature_category: :backup_restore do +RSpec.describe Backup::Database, :reestablished_active_record_base, feature_category: :backup_restore do let(:progress) { StringIO.new } let(:output) { progress.string } let(:one_database_configured?) { base_models_for_backup.one? } @@ -37,13 +31,6 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do subject { described_class.new(progress, force: force) } - before do - base_models_for_backup.each do |_, base_model| - base_model.connection.rollback_transaction unless base_model.connection.open_transactions.zero? 
- allow(base_model.connection).to receive(:execute).and_call_original - end - end - it 'creates gzipped database dumps' do Dir.mktmpdir do |dir| subject.dump(dir, backup_id) @@ -62,14 +49,15 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do it 'uses snapshots' do Dir.mktmpdir do |dir| - base_model = Gitlab::Database.database_base_models['main'] - expect(base_model.connection).to receive(:begin_transaction).with( - isolation: :repeatable_read - ).and_call_original - expect(base_model.connection).to receive(:select_value).with( - "SELECT pg_export_snapshot()" - ).and_call_original - expect(base_model.connection).to receive(:rollback_transaction).and_call_original + expect_next_instances_of(Backup::DatabaseModel, 2) do |adapter| + expect(adapter.connection).to receive(:begin_transaction).with( + isolation: :repeatable_read + ).and_call_original + expect(adapter.connection).to receive(:select_value).with( + "SELECT pg_export_snapshot()" + ).and_call_original + expect(adapter.connection).to receive(:rollback_transaction).and_call_original + end subject.dump(dir, backup_id) end @@ -95,7 +83,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do it 'does not use snapshots' do Dir.mktmpdir do |dir| - base_model = Gitlab::Database.database_base_models['main'] + base_model = Backup::DatabaseModel.new('main') expect(base_model.connection).not_to receive(:begin_transaction).with( isolation: :repeatable_read ).and_call_original @@ -111,7 +99,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do describe 'pg_dump arguments' do let(:snapshot_id) { 'fake_id' } - let(:pg_args) do + let(:default_pg_args) do args = [ '--clean', '--if-exists' @@ -130,24 +118,35 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do before do allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper) allow(dumper).to receive(:dump).with(any_args).and_return(true) + end - base_models_for_backup.each do |_, base_model| - allow(base_model.connection).to receive(:select_value).with( - "SELECT pg_export_snapshot()" - ).and_return(snapshot_id) + shared_examples 'pg_dump arguments' do + it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do + number_of_databases = base_models_for_backup.count + if number_of_databases > 1 + expect_next_instances_of(Backup::DatabaseModel, number_of_databases) do |model| + expect(model.connection).to receive(:select_value).with( + "SELECT pg_export_snapshot()" + ).and_return(snapshot_id) + end + end + + expect(dumper).to receive(:dump).with(anything, anything, expected_pg_args) + + subject.dump(destination_dir, backup_id) end end - it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do - expect(dumper).to receive(:dump).with(anything, anything, pg_args) + context 'when no PostgreSQL schemas are specified' do + let(:expected_pg_args) { default_pg_args } - subject.dump(destination_dir, backup_id) + include_examples 'pg_dump arguments' end context 'when a PostgreSQL schema is used' do let(:schema) { 'gitlab' } - let(:additional_args) do - pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema| + let(:expected_pg_args) do + default_pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema| ['-n', schema.to_s] end end @@ -156,11 +155,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do allow(Gitlab.config.backup).to receive(:pg_schema).and_return(schema) end - it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do - 
expect(dumper).to receive(:dump).with(anything, anything, additional_args) - - subject.dump(destination_dir, backup_id) - end + include_examples 'pg_dump arguments' end end @@ -180,6 +175,25 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do end end end + + context 'when using GITLAB_BACKUP_* environment variables' do + before do + stub_env('GITLAB_BACKUP_PGHOST', 'test.invalid.') + end + + it 'will override database.yml configuration' do + # Expect an error because we can't connect to test.invalid. + expect do + Dir.mktmpdir { |dir| subject.dump(dir, backup_id) } + end.to raise_error(Backup::DatabaseBackupError) + + expect do + ApplicationRecord.connection.select_value('select 1') + end.not_to raise_error + + expect(ENV['PGHOST']).to be_nil + end + end end describe '#restore' do @@ -288,7 +302,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) end - expect(ENV).to receive(:[]=).with('PGHOST', 'test.example.com') + expect(ENV).to receive(:merge!).with(hash_including { 'PGHOST' => 'test.example.com' }) expect(ENV).not_to receive(:[]=).with('PGPASSWORD', anything) subject.restore(backup_dir) diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb index 1105f39124b..6c2656b1c48 100644 --- a/spec/lib/backup/gitaly_backup_spec.rb +++ b/spec/lib/backup/gitaly_backup_spec.rb @@ -45,9 +45,9 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do context 'create' do RSpec.shared_examples 'creates a repository backup' do it 'creates repository bundles', :aggregate_failures do - # Add data to the wiki, design repositories, and snippets, so they will be included in the dump. + # Add data to the wiki, and snippets, so they will be included in the dump. + # Design repositories already have data through the factory :project_with_design create(:wiki_page, container: project) - create(:design, :with_file, issue: create(:issue, project: project)) project_snippet = create(:project_snippet, :repository, project: project) personal_snippet = create(:personal_snippet, :repository, author: project.first_owner) @@ -56,7 +56,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do subject.start(:create, destination, backup_id: backup_id) subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::WIKI) - subject.enqueue(project, Gitlab::GlRepository::DESIGN) + subject.enqueue(project.design_management_repository, Gitlab::GlRepository::DESIGN) subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) subject.finish! 
@@ -126,13 +126,13 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do end context 'hashed storage' do - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } it_behaves_like 'creates a repository backup' end context 'legacy storage' do - let_it_be(:project) { create(:project, :repository, :legacy_storage) } + let_it_be(:project) { create(:project_with_design, :repository, :legacy_storage) } it_behaves_like 'creates a repository backup' end @@ -162,7 +162,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do end context 'restore' do - let_it_be(:project) { create(:project, :repository, :design_repo) } + let_it_be(:project) { create(:project_with_design, :repository) } let_it_be(:personal_snippet) { create(:personal_snippet, author: project.first_owner) } let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) } @@ -189,7 +189,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do subject.start(:restore, destination, backup_id: backup_id) subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::WIKI) - subject.enqueue(project, Gitlab::GlRepository::DESIGN) + subject.enqueue(project.design_management_repository, Gitlab::GlRepository::DESIGN) subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) subject.finish! diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb index d8794ba68a0..1f3818de4a0 100644 --- a/spec/lib/backup/repositories_spec.rb +++ b/spec/lib/backup/repositories_spec.rb @@ -22,7 +22,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do end describe '#dump' do - let_it_be(:projects) { create_list(:project, 5, :repository) } + let_it_be(:projects) { create_list(:project_with_design, 5, :repository) } RSpec.shared_examples 'creates repository bundles' do it 'calls enqueue for each repository type', :aggregate_failures do @@ -34,7 +34,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:finish!) 
@@ -42,13 +42,13 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do end context 'hashed storage' do - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } it_behaves_like 'creates repository bundles' end context 'legacy storage' do - let_it_be(:project) { create(:project, :repository, :legacy_storage) } + let_it_be(:project) { create(:project_with_design, :repository, :legacy_storage) } it_behaves_like 'creates repository bundles' end @@ -75,15 +75,19 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do create_list(:project, 2, :repository) create_list(:snippet, 2, :repository) + # Number of expected queries are 2 more than control_count + # to account for the queries for project.design_management_repository + # for each project. + # We are using 2 projects here. expect do subject.dump(destination, backup_id) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control_count + 2) end describe 'storages' do let(:storages) { %w{default} } - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } before do stub_storage_settings('test_second_storage' => { @@ -93,7 +97,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do end it 'calls enqueue for all repositories on the specified storage', :aggregate_failures do - excluded_project = create(:project, :repository, repository_storage: 'test_second_storage') + excluded_project = create(:project_with_design, :repository, repository_storage: 'test_second_storage') excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project) excluded_project_snippet.track_snippet_repository('test_second_storage') excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner) @@ -107,13 +111,13 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end describe 'paths' do - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } context 'project path' do let(:paths) { [project.full_path] } @@ -131,7 +135,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) 
end end @@ -152,14 +156,14 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end end describe 'skip_paths' do - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } let_it_be(:excluded_project) { create(:project, :repository) } context 'project path' do @@ -177,7 +181,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end @@ -197,7 +201,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end @@ -205,7 +209,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do end describe '#restore' do - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: project.first_owner) } let_it_be(:project_snippet) { create(:project_snippet, :repository, project: project, author: project.first_owner) } @@ -216,7 +220,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default]) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:finish!) 
@@ -300,7 +304,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end @@ -322,7 +326,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end @@ -343,7 +347,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end @@ -367,7 +371,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) end end @@ -387,7 +391,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) - expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) + expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:finish!) 
end end diff --git a/spec/lib/banzai/filter/code_language_filter_spec.rb b/spec/lib/banzai/filter/code_language_filter_spec.rb index 25f844ee575..d6be088eaff 100644 --- a/spec/lib/banzai/filter/code_language_filter_spec.rb +++ b/spec/lib/banzai/filter/code_language_filter_spec.rb @@ -27,7 +27,7 @@ RSpec.describe Banzai::Filter::CodeLanguageFilter, feature_category: :team_plann end end - context 'when lang is specified' do + context 'when lang is specified on `pre`' do it 'adds data-canonical-lang and removes lang attribute' do result = filter('
<pre lang="ruby"><code>def fun end</code></pre>
') @@ -36,19 +36,39 @@ RSpec.describe Banzai::Filter::CodeLanguageFilter, feature_category: :team_plann end end - context 'when lang has extra params' do - let(:lang_params) { 'foo-bar-kux' } - let(:xss_lang) { %(ruby data-meta="foo-bar-kux"<script>alert(1)</script>) } + context 'when lang is specified on `code`' do + it 'adds data-canonical-lang to `pre` and removes lang attribute' do + result = filter('
<pre><code lang="ruby">def fun end</code></pre>
') + + expect(result.to_html.delete("\n")) + .to eq('
<pre data-canonical-lang="ruby"><code>def fun end</code></pre>
') + end + end - it 'includes data-lang-params tag with extra information and removes data-meta' do - expected_result = <<~HTML + context 'when lang has extra params' do + let_it_be(:lang_params) { 'foo-bar-kux' } + let_it_be(:xss_lang) { %(ruby data-meta="foo-bar-kux"<script>alert(1)</script>) } + let_it_be(:expected_result) do + <<~HTML
         This is a test
HTML + end + + context 'when lang is specified on `pre`' do + it 'includes data-lang-params tag with extra information and removes data-meta' do + result = filter(%(
This is a test
)) + + expect(result.to_html.delete("\n")).to eq(expected_result.delete("\n")) + end + end - result = filter(%(
This is a test
)) + context 'when lang is specified on `code`' do + it 'includes data-lang-params tag with extra information and removes data-meta' do + result = filter(%(
This is a test
)) - expect(result.to_html.delete("\n")).to eq(expected_result.delete("\n")) + expect(result.to_html.delete("\n")).to eq(expected_result.delete("\n")) + end end include_examples 'XSS prevention', 'ruby' diff --git a/spec/lib/banzai/filter/inline_diff_filter_spec.rb b/spec/lib/banzai/filter/inline_diff_filter_spec.rb index 1388a9053d9..89ee17837e0 100644 --- a/spec/lib/banzai/filter/inline_diff_filter_spec.rb +++ b/spec/lib/banzai/filter/inline_diff_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::InlineDiffFilter do +RSpec.describe Banzai::Filter::InlineDiffFilter, feature_category: :source_code_management do include FilterSpecHelper it 'adds inline diff span tags for deletions when using square brackets' do diff --git a/spec/lib/bitbucket/representation/pull_request_spec.rb b/spec/lib/bitbucket/representation/pull_request_spec.rb index f39222805d0..9ebf59ecf82 100644 --- a/spec/lib/bitbucket/representation/pull_request_spec.rb +++ b/spec/lib/bitbucket/representation/pull_request_spec.rb @@ -56,4 +56,55 @@ RSpec.describe Bitbucket::Representation::PullRequest, feature_category: :import describe '#updated_at' do it { expect(described_class.new('updated_on' => '2023-01-01').updated_at).to eq('2023-01-01') } end + + describe '#merge_commit_sha' do + it { expect(described_class.new('merge_commit' => { 'hash' => 'SHA' }).merge_commit_sha).to eq('SHA') } + it { expect(described_class.new({}).merge_commit_sha).to be_nil } + end + + describe '#to_hash' do + it do + raw = { + 'id' => 11, + 'description' => 'description', + 'author' => { 'nickname' => 'user-1' }, + 'state' => 'MERGED', + 'created_on' => 'created-at', + 'updated_on' => 'updated-at', + 'title' => 'title', + 'source' => { + 'branch' => { 'name' => 'source-branch-name' }, + 'commit' => { 'hash' => 'source-commit-hash' } + }, + 'destination' => { + 'branch' => { 'name' => 'destination-branch-name' }, + 'commit' => { 'hash' => 'destination-commit-hash' } + }, + 'merge_commit' => { 'hash' => 'merge-commit-hash' }, + 'reviewers' => [ + { + 'username' => 'user-2' + } + ] + } + + expected_hash = { + author: 'user-1', + created_at: 'created-at', + description: 'description', + iid: 11, + source_branch_name: 'source-branch-name', + source_branch_sha: 'source-commit-hash', + merge_commit_sha: 'merge-commit-hash', + state: 'merged', + target_branch_name: 'destination-branch-name', + target_branch_sha: 'destination-commit-hash', + title: 'title', + updated_at: 'updated-at', + reviewers: ['user-2'] + } + + expect(described_class.new(raw).to_hash).to eq(expected_hash) + end + end end diff --git a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb index bcc2d6fd5ed..4a97e092141 100644 --- a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb +++ b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb @@ -41,7 +41,17 @@ RSpec.describe BulkImports::Common::Graphql::GetMembersQuery, feature_category: it 'queries group & group members' do expect(query.to_s).to include('group') expect(query.to_s).to include('groupMembers') - expect(query.to_s).to include('SHARED_FROM_GROUPS') + expect(query.to_s).to include('DIRECT INHERITED') + end + + context "when source version is past 14.7.0" do + before do + entity.bulk_import.update!(source_version: "14.8.0") + end + + it 'includes SHARED_FROM_GROUPS' do + expect(query.to_s).to include('DIRECT INHERITED SHARED_FROM_GROUPS') + end end end @@ -51,7 +61,17 @@ RSpec.describe 
BulkImports::Common::Graphql::GetMembersQuery, feature_category: it 'queries project & project members' do expect(query.to_s).to include('project') expect(query.to_s).to include('projectMembers') - expect(query.to_s).to include('INVITED_GROUPS SHARED_INTO_ANCESTORS') + expect(query.to_s).to include('DIRECT INHERITED INVITED_GROUPS') + end + + context "when source version is at least 16.0.0" do + before do + entity.bulk_import.update!(source_version: "16.0.0") + end + + it 'includes SHARED_INTO_ANCESTORS' do + expect(query.to_s).to include('DIRECT INHERITED INVITED_GROUPS SHARED_INTO_ANCESTORS') + end end end end diff --git a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb index dc17dc594a8..8ca74565788 100644 --- a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb @@ -2,9 +2,9 @@ require 'spec_helper' -RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do +RSpec.describe BulkImports::Common::Pipelines::EntityFinisher, feature_category: :importers do it 'updates the entity status to finished' do - entity = create(:bulk_import_entity, :started) + entity = create(:bulk_import_entity, :project_entity, :started) pipeline_tracker = create(:bulk_import_tracker, entity: entity) context = BulkImports::Pipeline::Context.new(pipeline_tracker) subject = described_class.new(context) @@ -24,7 +24,7 @@ RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do ) end - expect(context.portable).to receive(:try).with(:after_import) + expect(BulkImports::FinishProjectImportWorker).to receive(:perform_async).with(entity.project_id) expect { subject.run } .to change(entity, :status_name).to(:finished) diff --git a/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb index 1c9ed4f0f97..4565de32c70 100644 --- a/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb +++ b/spec/lib/bulk_imports/common/transformers/member_attributes_transformer_spec.rb @@ -85,14 +85,15 @@ RSpec.describe BulkImports::Common::Transformers::MemberAttributesTransformer, f end end - describe 'source user id caching' do + describe 'source user id and username caching' do context 'when user gid is present' do - it 'caches source user id' do + it 'caches source user id and username' do gid = 'gid://gitlab/User/7' data = member_data(email: user.email, gid: gid) expect_next_instance_of(BulkImports::UsersMapper) do |mapper| expect(mapper).to receive(:cache_source_user_id).with('7', user.id) + expect(mapper).to receive(:cache_source_username).with('source_username', user.username) end subject.transform(context, data) @@ -108,6 +109,35 @@ RSpec.describe BulkImports::Common::Transformers::MemberAttributesTransformer, f subject.transform(context, data) end end + + context 'when username is nil' do + it 'caches source user id only' do + gid = 'gid://gitlab/User/7' + data = nil_username_member_data(email: user.email, gid: gid) + + expect_next_instance_of(BulkImports::UsersMapper) do |mapper| + expect(mapper).to receive(:cache_source_user_id).with('7', user.id) + expect(mapper).not_to receive(:cache_source_username) + end + + subject.transform(context, data) + end + end + + context 'when source username matches destination username' do + it 'caches source user id only' do + gid = 'gid://gitlab/User/7' + data = member_data(email: user.email, 
gid: gid) + data["user"]["username"] = user.username + + expect_next_instance_of(BulkImports::UsersMapper) do |mapper| + expect(mapper).to receive(:cache_source_user_id).with('7', user.id) + expect(mapper).not_to receive(:cache_source_username) + end + + subject.transform(context, data) + end + end end end end @@ -136,7 +166,24 @@ RSpec.describe BulkImports::Common::Transformers::MemberAttributesTransformer, f }, 'user' => { 'user_gid' => gid, - 'public_email' => email + 'public_email' => email, + 'username' => 'source_username' + } + } + end + + def nil_username_member_data(email: '', gid: nil, access_level: 30) + { + 'created_at' => '2020-01-01T00:00:00Z', + 'updated_at' => '2020-01-01T00:00:00Z', + 'expires_at' => nil, + 'access_level' => { + 'integer_value' => access_level + }, + 'user' => { + 'user_gid' => gid, + 'public_email' => email, + 'username' => nil } } end diff --git a/spec/lib/bulk_imports/file_downloads/validations_spec.rb b/spec/lib/bulk_imports/file_downloads/validations_spec.rb index 85f45c2a8f0..95f3f78310f 100644 --- a/spec/lib/bulk_imports/file_downloads/validations_spec.rb +++ b/spec/lib/bulk_imports/file_downloads/validations_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::FileDownloads::Validations do +RSpec.describe BulkImports::FileDownloads::Validations, feature_category: :importers do let(:dummy_instance) { dummy_class.new } let(:dummy_class) do Class.new do diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb index 7d1f9ae5da0..87b64ef198e 100644 --- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb +++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb @@ -84,6 +84,33 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader, feature_category: :imp include_examples 'calls Group Create Service to create a new group' end + + context 'when user does not have 2FA enabled' do + before do + allow(user).to receive(:two_factor_enabled?).and_return(false) + end + + context 'when require_two_factor_authentication is not passed' do + include_examples 'calls Group Create Service to create a new group' + end + + context 'when require_two_factor_authentication is false' do + let(:data) { { 'require_two_factor_authentication' => false, 'path' => 'test' } } + + include_examples 'calls Group Create Service to create a new group' + end + + context 'when require_two_factor_authentication is true' do + let(:data) { { 'require_two_factor_authentication' => true, 'path' => 'test' } } + + it 'does not create new group' do + expect(::Groups::CreateService).not_to receive(:new) + + expect { subject.load(context, data) } + .to raise_error(described_class::GroupCreationError, 'User requires Two-Factor Authentication') + end + end + end end context 'when user cannot create group' do diff --git a/spec/lib/bulk_imports/network_error_spec.rb b/spec/lib/bulk_imports/network_error_spec.rb index 54d6554df96..d5e2b739c8f 100644 --- a/spec/lib/bulk_imports/network_error_spec.rb +++ b/spec/lib/bulk_imports/network_error_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache do +RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache, feature_category: :importers do let(:tracker) { double(id: 1, stage: 2, entity: double(id: 3)) } describe '.new' do @@ -65,10 +65,32 @@ RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache do end describe '#retry_delay' do - it 'returns the default value 
when there is not a rate limit error' do - exception = described_class.new('foo') + context 'when the exception is not a rate limit error' do + let(:exception) { described_class.new('Error!') } - expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds) + it 'returns the default value' do + expect(exception.retry_delay).to eq(described_class::DEFAULT_RETRY_DELAY_SECONDS.seconds) + end + + context 'when the exception is a decompression error' do + before do + allow(exception).to receive(:cause).and_return(Zlib::Error.new('Error!')) + end + + it 'returns the exception delay value' do + expect(exception.retry_delay).to eq(60.seconds) + end + end + + context 'when the exception is a no space left error' do + before do + allow(exception).to receive(:cause).and_return(Errno::ENOSPC.new('Error!')) + end + + it 'returns the exception delay value' do + expect(exception.retry_delay).to eq(120.seconds) + end + end end context 'when the exception is a rate limit error' do diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb index e66f2d26911..2f54ab111c8 100644 --- a/spec/lib/bulk_imports/pipeline/runner_spec.rb +++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Pipeline::Runner do +RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do let(:extractor) do Class.new do def initialize(options = {}); end diff --git a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb index a0789522ea8..fd13c10d61e 100644 --- a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb @@ -164,6 +164,40 @@ RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do expect(note.award_emoji.first.name).to eq('clapper') end end + + context "when importing an issue with one award emoji and other relations with one item" do + let(:issue_attributes) do + { + "notes" => [ + { + 'note' => 'Description changed', + 'author_id' => 22, + 'author' => { + 'name' => 'User 22' + }, + 'updated_at' => '2016-06-14T15:02:47.770Z' + } + ], + 'award_emoji' => [ + { + 'name' => 'thumbsup', + 'user_id' => 22 + } + ] + } + end + + it 'saves properly' do + issue = project.issues.last + notes = issue.notes + + aggregate_failures do + expect(notes.count).to eq 1 + expect(notes[0].note).to include("Description changed") + expect(issue.award_emoji.first.name).to eq "thumbsup" + end + end + end end end end diff --git a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb index 3a808851f81..af8bce47c3d 100644 --- a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb @@ -24,7 +24,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat create( :merge_request, source_project: project, - description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1' + description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!' 
) end @@ -33,7 +33,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat :note, project: project, noteable: issue, - note: 'https://my.gitlab.com/source/full/path/-/issues/1' + note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.' ) end @@ -42,7 +42,16 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat :note, project: project, noteable: mr, - note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1' + note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username' + ) + end + + let(:interchanged_usernames) do + create( + :note, + project: project, + noteable: mr, + note: '@manuelgrabowski-admin, @boaty-mc-boatface' ) end @@ -53,19 +62,48 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat project: project, system: true, noteable: issue, - note: "mentioned in merge request !#{mr.iid}", + note: "mentioned in merge request !#{mr.iid} created by @old_username", note_html: old_note_html ) end + let(:username_system_note) do + create( + :note, + project: project, + system: true, + noteable: issue, + note: "mentioned in merge request created by @source_username.", + note_html: 'empty' + ) + end + subject(:pipeline) { described_class.new(context) } before do project.add_owner(user) + + allow(Gitlab::Cache::Import::Caching) + .to receive(:values_from_hash) + .and_return({ + 'old_username' => 'new_username', + 'older_username' => 'newer_username', + 'source_username' => 'destination_username', + 'bob' => 'alice-gdk', + 'alice' => 'bob-gdk', + 'manuelgrabowski' => 'manuelgrabowski-admin', + 'manuelgrabowski-admin' => 'manuelgrabowski', + 'boaty-mc-boatface' => 'boatymcboatface', + 'boatymcboatface' => 'boaty-mc-boatface' + }) end def create_project_data - [issue, mr, issue_note, mr_note, system_note] + [issue, mr, issue_note, mr_note, system_note, username_system_note] + end + + def create_username_project_data + [username_system_note] end describe '#extract' do @@ -75,11 +113,14 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat extracted_data = subject.extract(context) expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData) - expect(extracted_data.data).to contain_exactly(issue_note, mr, issue, mr_note) + expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note) expect(system_note.note_html).not_to eq(old_note_html) expect(system_note.note_html) - .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}

") + .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}") .and include(project.full_path.to_s) + .and include("@old_username") + expect(username_system_note.note_html) + .to include("@source_username") end context 'when object body is nil' do @@ -94,9 +135,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat end describe '#transform' do - it 'updates matching urls with new ones' do + it 'updates matching urls and usernames with new ones' do transformed_mr = subject.transform(context, mr) transformed_note = subject.transform(context, mr_note) + transformed_issue = subject.transform(context, issue) + transformed_issue_note = subject.transform(context, issue_note) + transformed_system_note = subject.transform(context, system_note) + transformed_username_system_note = subject.transform(context, username_system_note) expected_url = URI('') expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http' @@ -104,11 +149,44 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat expected_url.port = ::Gitlab.config.gitlab.port expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}" - expect(transformed_mr.description).to eq(expected_url.to_s) - expect(transformed_note.note).to eq(expected_url.to_s) + expect(transformed_issue_note.note).not_to include("@older_username") + expect(transformed_mr.description).not_to include("@source_username") + expect(transformed_system_note.note).not_to include("@old_username") + expect(transformed_username_system_note.note).not_to include("@source_username") + + expect(transformed_issue.description).to eq('http://localhost:80/namespace1/project-1/-/issues/1') + expect(transformed_mr.description).to eq("#{expected_url} @destination_username? 
@alice-gdk, @bob-gdk!") + expect(transformed_note.note).to eq("#{expected_url} @same_username") + expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.") + expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username") + expect(transformed_username_system_note.note).to include("@destination_username.") end - context 'when object does not have reference' do + it 'handles situations where old usernames are substrings of new usernames' do + transformed_mr = subject.transform(context, mr) + + expect(transformed_mr.description).to include("@alice-gdk") + expect(transformed_mr.description).not_to include("@bob-gdk-gdk") + end + + it 'handles situations where old and new usernames are interchanged' do + # e.g + # |------------------------|-------------------------| + # | old_username | new_username | + # |------------------------|-------------------------| + # | @manuelgrabowski-admin | @manuelgrabowski | + # | @manuelgrabowski | @manuelgrabowski-admin | + # |------------------------|-------------------------| + + transformed_interchanged_usernames = subject.transform(context, interchanged_usernames) + + expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski") + expect(transformed_interchanged_usernames.note).to include("@boatymcboatface") + expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin") + expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface") + end + + context 'when object does not have reference or username' do it 'returns object unchanged' do issue.update!(description: 'foo') @@ -118,35 +196,35 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat end end - context 'when there are not matched urls' do - let(:url) { 'https://my.gitlab.com/another/project/path/-/issues/1' } + context 'when there are not matched urls or usernames' do + let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' } shared_examples 'returns object unchanged' do it 'returns object unchanged' do - issue.update!(description: url) + issue.update!(description: description) transformed_issue = subject.transform(context, issue) - expect(transformed_issue.description).to eq(url) + expect(transformed_issue.description).to eq(description) end end include_examples 'returns object unchanged' context 'when url path does not start with source full path' do - let(:url) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' } + let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' } include_examples 'returns object unchanged' end context 'when host does not match and url path starts with source full path' do - let(:url) { 'https://another.gitlab.com/source/full/path/-/issues/1' } + let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' } include_examples 'returns object unchanged' end context 'when url does not match at all' do - let(:url) { 'https://website.example/foo/bar' } + let(:description) { 'https://website.example/foo/bar' } include_examples 'returns object unchanged' end @@ -156,13 +234,22 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat describe '#load' do it 'saves the object when object body changed' do transformed_issue = subject.transform(context, issue) - transformed_note = subject.transform(context, issue_note) + transformed_note = subject.transform(context, mr_note) + 
transformed_mr = subject.transform(context, mr) + transformed_issue_note = subject.transform(context, issue_note) + transformed_system_note = subject.transform(context, system_note) expect(transformed_issue).to receive(:save!) expect(transformed_note).to receive(:save!) + expect(transformed_mr).to receive(:save!) + expect(transformed_issue_note).to receive(:save!) + expect(transformed_system_note).to receive(:save!) subject.load(context, transformed_issue) subject.load(context, transformed_note) + subject.load(context, transformed_mr) + subject.load(context, transformed_issue_note) + subject.load(context, transformed_system_note) end context 'when object body is not changed' do diff --git a/spec/lib/bulk_imports/users_mapper_spec.rb b/spec/lib/bulk_imports/users_mapper_spec.rb index e6357319d05..dc2beb42080 100644 --- a/spec/lib/bulk_imports/users_mapper_spec.rb +++ b/spec/lib/bulk_imports/users_mapper_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::UsersMapper do +RSpec.describe BulkImports::UsersMapper, feature_category: :importers do let_it_be(:user) { create(:user) } let_it_be(:import) { create(:bulk_import, user: user) } let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) } @@ -34,6 +34,22 @@ RSpec.describe BulkImports::UsersMapper do end end + describe '#map_usernames' do + context 'when value for specified key exists' do + it 'returns a map of source & destination usernames from redis' do + allow(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).and_return({ "source_username" => "destination_username" }) + + expect(subject.map_usernames).to eq({ "source_username" => "destination_username" }) + end + end + + context 'when value for specified key does not exist' do + it 'returns nil' do + expect(subject.map_usernames[:non_existent_key]).to be_nil + end + end + end + describe '#default_user_id' do it 'returns current user id' do expect(subject.default_user_id).to eq(user.id) @@ -65,4 +81,12 @@ RSpec.describe BulkImports::UsersMapper do subject.cache_source_user_id(1, 2) end end + + describe '#cache_source_username' do + it 'caches provided source & destination usernames in redis' do + expect(Gitlab::Cache::Import::Caching).to receive(:hash_add).with("bulk_imports/#{import.id}/#{entity.id}/source_usernames", 'source', 'destination') + + subject.cache_source_username('source', 'destination') + end + end end diff --git a/spec/lib/click_house/bind_index_manager_spec.rb b/spec/lib/click_house/bind_index_manager_spec.rb deleted file mode 100644 index 1c659017c63..00000000000 --- a/spec/lib/click_house/bind_index_manager_spec.rb +++ /dev/null @@ -1,33 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe ClickHouse::BindIndexManager, feature_category: :database do - describe '#next_bind_str' do - context 'when initialized without a start index' do - let(:bind_manager) { described_class.new } - - it 'starts from index 1 by default' do - expect(bind_manager.next_bind_str).to eq('$1') - end - - it 'increments the bind string on subsequent calls' do - bind_manager.next_bind_str - expect(bind_manager.next_bind_str).to eq('$2') - end - end - - context 'when initialized with a start index' do - let(:bind_manager) { described_class.new(2) } - - it 'starts from the given index' do - expect(bind_manager.next_bind_str).to eq('$2') - end - - it 'increments the bind string on subsequent calls' do - bind_manager.next_bind_str - expect(bind_manager.next_bind_str).to eq('$3') - end - end - end -end diff --git 
a/spec/lib/click_house/query_builder_spec.rb b/spec/lib/click_house/query_builder_spec.rb index 9e3f1118eeb..f5e1d53e7c1 100644 --- a/spec/lib/click_house/query_builder_spec.rb +++ b/spec/lib/click_house/query_builder_spec.rb @@ -288,7 +288,8 @@ RSpec.describe ClickHouse::QueryBuilder, feature_category: :database do describe '#to_redacted_sql' do it 'calls ::ClickHouse::Redactor correctly' do - expect(::ClickHouse::Redactor).to receive(:redact).with(builder) + expect(::ClickHouse::Redactor).to receive(:redact).with(builder, + an_instance_of(ClickHouse::Client::BindIndexManager)) builder.to_redacted_sql end @@ -331,4 +332,27 @@ RSpec.describe ClickHouse::QueryBuilder, feature_category: :database do expect(sql).to eq(expected_sql) end end + + context 'when combining with a raw query' do + it 'correctly generates the SQL query' do + raw_query = 'SELECT * FROM isues WHERE title = {title:String} AND id IN ({query:Subquery})' + placeholders = { + title: "'test'", + query: builder.select(:id).where(column1: 'value1', column2: 'value2') + } + + query = ClickHouse::Client::Query.new(raw_query: raw_query, placeholders: placeholders) + expected_sql = "SELECT * FROM isues WHERE title = {title:String} AND id IN (SELECT \"test_table\".\"id\" " \ + "FROM \"test_table\" WHERE \"test_table\".\"column1\" = 'value1' AND " \ + "\"test_table\".\"column2\" = 'value2')" + + expect(query.to_sql).to eq(expected_sql) + + expected_redacted_sql = "SELECT * FROM isues WHERE title = $1 AND id IN (SELECT \"test_table\".\"id\" " \ + "FROM \"test_table\" WHERE \"test_table\".\"column1\" = $2 AND " \ + "\"test_table\".\"column2\" = $3)" + + expect(query.to_redacted_sql).to eq(expected_redacted_sql) + end + end end diff --git a/spec/lib/click_house/record_sync_context_spec.rb b/spec/lib/click_house/record_sync_context_spec.rb new file mode 100644 index 00000000000..7873796cd9c --- /dev/null +++ b/spec/lib/click_house/record_sync_context_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ClickHouse::RecordSyncContext, feature_category: :value_stream_management do + let(:records) { [Issue.new(id: 1), Issue.new(id: 2), Issue.new(id: 3), Issue.new(id: 4)] } + + subject(:sync_context) { described_class.new(last_record_id: 0, max_records_per_batch: 3) } + + it 'allows processing 3 records per batch' do + records.take(3).each do |record| + sync_context.last_processed_id = record.id + end + + expect(sync_context).to be_record_limit_reached + expect(sync_context.last_processed_id).to eq(3) + + expect { sync_context.new_batch! }.to change { sync_context.record_count_in_current_batch }.from(3).to(0) + + expect(sync_context).not_to be_record_limit_reached + + records.take(3).each do |record| + sync_context.last_processed_id = record.id + end + + expect(sync_context).to be_record_limit_reached + end + + it 'sets the no more records flag' do + expect { sync_context.no_more_records! }.to change { sync_context.no_more_records? 
}.from(false).to(true) + end +end diff --git a/spec/lib/click_house/sync_cursor_spec.rb b/spec/lib/click_house/sync_cursor_spec.rb new file mode 100644 index 00000000000..43ffaa76e1d --- /dev/null +++ b/spec/lib/click_house/sync_cursor_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ClickHouse::SyncCursor, feature_category: :value_stream_management, click_house: {} do + def value + ClickHouse::SyncCursor.cursor_for(:my_table) + end + + context 'when cursor is empty' do + it 'returns the default value: 0' do + expect(value).to eq(0) + end + end + + context 'when cursor is present' do + it 'updates and returns the current cursor value' do + described_class.update_cursor_for(:my_table, 1111) + + expect(value).to eq(1111) + + described_class.update_cursor_for(:my_table, 2222) + + expect(value).to eq(2222) + end + end + + context 'when updating a different cursor' do + it 'does not affect the other cursors' do + described_class.update_cursor_for(:other_table, 1111) + + expect(value).to eq(0) + end + end +end diff --git a/spec/lib/constraints/activity_pub_constrainer_spec.rb b/spec/lib/constraints/activity_pub_constrainer_spec.rb new file mode 100644 index 00000000000..2a3d23501a9 --- /dev/null +++ b/spec/lib/constraints/activity_pub_constrainer_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Constraints::ActivityPubConstrainer, feature_category: :groups_and_projects do + subject(:constraint) { described_class.new } + + describe '#matches?' do + subject { constraint.matches?(request) } + + let(:request) { ActionDispatch::Request.new(headers) } + + ['application/ld+json; profile="https://www.w3.org/ns/activitystreams"', 'application/activity+json'].each do |mime| + context "when Accept header is #{mime}" do + let(:headers) { { 'HTTP_ACCEPT' => mime } } + + it 'matches the header' do + is_expected.to be_truthy + end + end + + context "when Content-Type header is #{mime}" do + let(:headers) { { 'CONTENT_TYPE' => mime } } + + it 'matches the header' do + is_expected.to be_truthy + end + end + end + + context 'when Accept and Content-Type headers are missing' do + let(:headers) { {} } + + it 'does not match' do + is_expected.to be_falsey + end + end + end +end diff --git a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt index 2728d65d54b..185f6deeade 100644 --- a/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt +++ b/spec/lib/generators/batched_background_migration/expected_files/my_batched_migration_spec_matcher.txt @@ -2,6 +2,6 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::MyBatchedMigration, schema: [0-9]+, feature_category: :database do # rubocop:disable Layout/LineLength +RSpec.describe Gitlab::BackgroundMigration::MyBatchedMigration, feature_category: :database do # rubocop:disable Layout/LineLength # Tests go here end diff --git a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb index e67e48d83a3..b75d75107ee 100644 --- a/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb +++ b/spec/lib/generators/gitlab/analytics/internal_events_generator_spec.rb @@ -141,8 +141,10 @@ RSpec.describe Gitlab::Analytics::InternalEventsGenerator, :silence_stdout, feat 
.to receive(:known_event?).with(event).and_return(true) end - it 'raises error' do - expect { described_class.new([], options).invoke_all }.to raise_error(RuntimeError) + it 'does not create event definition' do + described_class.new([], options).invoke_all + + expect(event_definition_path).to eq(nil) end end diff --git a/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb b/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb index 7c7ca8207ff..229100186be 100644 --- a/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb +++ b/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb @@ -33,8 +33,6 @@ feature_category: :continuous_integration do SQL end - let_it_be(:destination_root) { File.expand_path("../tmp", __dir__) } - let(:generator_config) { { destination_root: destination_root } } let(:generator_args) { ['--source', '_test_tmp_metadata', '--target', '_test_tmp_builds', '--database', 'main'] } @@ -124,4 +122,8 @@ feature_category: :continuous_integration do def schema_migrate_down! # no-op end + + def destination_root + File.expand_path("../tmp", __dir__) + end end diff --git a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb index 62a52ee5fb9..740cfa767e4 100644 --- a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb +++ b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb @@ -6,7 +6,10 @@ RSpec.describe Gitlab::SnowplowEventDefinitionGenerator, :silence_stdout, featur let(:ce_temp_dir) { Dir.mktmpdir } let(:ee_temp_dir) { Dir.mktmpdir } let(:timestamp) { Time.now.utc.strftime('%Y%m%d%H%M%S') } - let(:generator_options) { { 'category' => 'Groups::EmailCampaignsController', 'action' => 'click' } } + + let(:generator_options) do + { 'category' => 'Projects::Pipelines::EmailCampaignsController', 'action' => 'click' } + end before do stub_const("#{described_class}::CE_DIR", ce_temp_dir) diff --git a/spec/lib/gitlab/auth/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb index 226669bab33..291cfb951c3 100644 --- a/spec/lib/gitlab/auth/o_auth/provider_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Auth::OAuth::Provider do +RSpec.describe Gitlab::Auth::OAuth::Provider, feature_category: :system_access do describe '.enabled?' 
do before do allow(described_class).to receive(:providers).and_return([:ldapmain, :google_oauth2]) @@ -62,25 +62,27 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do context 'for an OmniAuth provider' do before do - provider = ActiveSupport::InheritableOptions.new( + provider = GitlabSettings::Options.new( name: 'google_oauth2', app_id: 'asd123', app_secret: 'asd123' ) - allow(Gitlab.config.omniauth).to receive(:providers).and_return([provider]) + openid_connect = GitlabSettings::Options.new(name: 'openid_connect') + + stub_omniauth_setting(providers: [provider, openid_connect]) end context 'when the provider exists' do - subject { described_class.config_for('google_oauth2') } + subject(:config) { described_class.config_for('google_oauth2') } it 'returns the config' do - expect(subject).to be_a(ActiveSupport::InheritableOptions) + expect(config).to be_a(GitlabSettings::Options) end it 'merges defaults with the given configuration' do defaults = Gitlab::OmniauthInitializer.default_arguments_for('google_oauth2').deep_stringify_keys - expect(subject['args']).to include(defaults) + expect(config['args']).to include(defaults) end end diff --git a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb index e5bc51edc2d..f12ed5a0e9c 100644 --- a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb +++ b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb @@ -30,7 +30,7 @@ RSpec.describe Gitlab::Auth::UserAccessDeniedReason do end context 'when the user is internal' do - let(:user) { User.ghost } + let(:user) { Users::Internal.ghost } it { is_expected.to match /This action cannot be performed by internal users/ } end diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index c4fa8513618..8da617175ca 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -10,7 +10,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate describe 'constants' do it 'API_SCOPES contains all scopes for API access' do - expect(subject::API_SCOPES).to match_array %i[api read_user read_api create_runner] + expect(subject::API_SCOPES).to match_array %i[api read_user read_api create_runner k8s_proxy] end it 'ADMIN_SCOPES contains all scopes for ADMIN access' do @@ -40,29 +40,29 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate end it 'contains all non-default scopes' do - expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner] + expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features] end it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do user = build_stubbed(:user, admin: false) - expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner] + expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features] end it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do user = build_stubbed(:user, admin: true) - 
expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner] + expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features] end it 'contains for project all resource bot scopes without observability scopes' do - expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner] + expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features] end it 'contains for group all resource bot scopes' do group = build_stubbed(:group) - expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner] + expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features] end it 'contains for unsupported type no scopes' do @@ -70,7 +70,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate end it 'optional_scopes contains all non-default scopes' do - expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner] + expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features] end context 'with observability_group_tab feature flag' do @@ -82,7 +82,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate it 'contains for group all resource bot scopes without observability scopes' do group = build_stubbed(:group) - expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner] + expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features] end end @@ -94,23 +94,23 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate end it 'contains for other group all resource bot scopes including observability scopes' do - expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner] + expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability create_runner k8s_proxy ai_features] end it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do user = build_stubbed(:user, admin: true) - expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner] + 
expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features] end it 'contains for project all resource bot scopes without observability scopes' do - expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner] + expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features] end it 'contains for other group all resource bot scopes without observability scopes' do other_group = build_stubbed(:group) - expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner] + expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry create_runner k8s_proxy ai_features] end end end diff --git a/spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb new file mode 100644 index 00000000000..fc4597fbb96 --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb @@ -0,0 +1,101 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillHasMergeRequestOfVulnerabilityReads, schema: 20230907155247, feature_category: :database do # rubocop:disable Layout/LineLength + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:users) { table(:users) } + let(:scanners) { table(:vulnerability_scanners) } + let(:vulnerabilities) { table(:vulnerabilities) } + let(:vulnerability_reads) { table(:vulnerability_reads) } + let(:merge_requests) { table(:merge_requests) } + let(:merge_request_links) { table(:vulnerability_merge_request_links) } + + let(:namespace) { namespaces.create!(name: 'user', path: 'user') } + let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) } + let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) } + let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') } + + let(:vulnerability) do + vulnerabilities.create!( + project_id: project.id, + author_id: user.id, + title: 'test', + severity: 1, + confidence: 1, + report_type: 1 + ) + end + + let(:merge_request) do + merge_requests.create!( + target_project_id: project.id, + source_branch: "other", + target_branch: "main", + author_id: user.id, + title: 'Feedback Merge Request' + ) + end + + let!(:vulnerability_read) do + vulnerability_reads.create!( + project_id: project.id, + vulnerability_id: vulnerability.id, + scanner_id: scanner.id, + severity: 1, + report_type: 1, + state: 1, + uuid: SecureRandom.uuid + ) + end + + let!(:merge_request_link) do + merge_request_links.create!( + vulnerability_id: vulnerability.id, merge_request_id: merge_request.id) + end + + subject(:perform_migration) do + described_class.new( + start_id: vulnerability_reads.first.vulnerability_id, + end_id: vulnerability_reads.last.vulnerability_id, + batch_table: :vulnerability_reads, + batch_column: :vulnerability_id, + sub_batch_size: 
vulnerability_reads.count, + pause_ms: 0, + connection: ActiveRecord::Base.connection + ).perform + end + + before do + # Unset since the trigger already sets during merge_request_link creation. + vulnerability_reads.update_all(has_merge_request: false) + end + + it 'sets the has_merge_request of existing record' do + expect { perform_migration }.to change { vulnerability_read.reload.has_merge_request }.from(false).to(true) + end + + it 'does not modify has_merge_request of other vulnerabilities which do not have merge request' do + vulnerability_2 = vulnerabilities.create!( + project_id: project.id, + author_id: user.id, + title: 'test 2', + severity: 1, + confidence: 1, + report_type: 1 + ) + + vulnerability_read_2 = vulnerability_reads.create!( + project_id: project.id, + vulnerability_id: vulnerability_2.id, + scanner_id: scanner.id, + severity: 1, + report_type: 1, + state: 1, + uuid: SecureRandom.uuid + ) + + expect { perform_migration }.not_to change { vulnerability_read_2.reload.has_merge_request }.from(false) + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb b/spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb new file mode 100644 index 00000000000..3f0bd417955 --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_nuget_normalized_version_spec.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillNugetNormalizedVersion, schema: 20230811103457, + feature_category: :package_registry do + let(:packages_nuget_metadata) { table(:packages_nuget_metadata) } + let(:versions) do + { + '1' => '1.0.0', + '1.0' => '1.0.0', + '1.0.0' => '1.0.0', + '1.00' => '1.0.0', + '1.00.01' => '1.0.1', + '1.01.1' => '1.1.1', + '1.0.0.0' => '1.0.0', + '1.0.01.0' => '1.0.1', + '1.0.7+r3456' => '1.0.7', + '1.0.0-Alpha' => '1.0.0-alpha', + '1.00.05-alpha.0' => '1.0.5-alpha.0' + } + end + + let!(:migration_attrs) do + { + start_id: packages_nuget_metadata.minimum(:package_id), + end_id: packages_nuget_metadata.maximum(:package_id), + batch_table: :packages_nuget_metadata, + batch_column: :package_id, + sub_batch_size: 1000, + pause_ms: 0, + connection: ApplicationRecord.connection + } + end + + let(:migration) { described_class.new(**migration_attrs) } + let(:packages) { table(:packages_packages) } + + let(:namespace) { table(:namespaces).create!(name: 'project', path: 'project', type: 'Project') } + let(:project) do + table(:projects).create!(name: 'project', path: 'project', project_namespace_id: namespace.id, + namespace_id: namespace.id) + end + + let(:package_ids) { [] } + + subject(:perform_migration) { migration.perform } + + before do + versions.each_key do |version| + packages.create!(name: 'test', version: version, package_type: 4, project_id: project.id).tap do |package| + package_ids << package.id + packages_nuget_metadata.create!(package_id: package.id) + end + end + end + + it 'executes 5 queries and updates the normalized_version column' do + queries = ActiveRecord::QueryRecorder.new do + perform_migration + end + + # each_batch lower bound query + # each_batch upper bound query + # SELECT packages_nuget_metadata.package_id FROM packages_nuget_metadata.... + # SELECT packages_packages.id, packages_packages.version FROM packages_packages.... + # UPDATE packages_nuget_metadata SET normalized_version =.... 
+ expect(queries.count).to eq(5) + + expect( + packages_nuget_metadata.where(package_id: package_ids).pluck(:normalized_version) + ).to match_array(versions.values) + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb new file mode 100644 index 00000000000..2884fb9b10b --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_project_statistics_storage_size_with_recent_size_spec.rb @@ -0,0 +1,165 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillProjectStatisticsStorageSizeWithRecentSize, + schema: 20230823090001, + feature_category: :consumables_cost_management do + include MigrationHelpers::ProjectStatisticsHelper + + include_context 'when backfilling project statistics' + + let(:recent_size_enabled_at) { described_class::RECENT_OBJECTS_SIZE_ENABLED_AT } + let(:default_stats) do + { + repository_size: 1, + wiki_size: 1, + lfs_objects_size: 1, + build_artifacts_size: 1, + packages_size: 1, + snippets_size: 1, + uploads_size: 1, + storage_size: default_storage_size, + updated_at: recent_size_enabled_at - 1.month + } + end + + describe '#filter_batch' do + let!(:project_statistics) { generate_records(default_projects, project_statistics_table, default_stats) } + let!(:expected) { project_statistics.map(&:id) } + + it 'filters out project_statistics with no repository_size' do + project_statistics_table.create!( + project_id: proj5.id, + namespace_id: proj5.namespace_id, + repository_size: 0, + wiki_size: 1, + lfs_objects_size: 1, + build_artifacts_size: 1, + packages_size: 1, + snippets_size: 1, + uploads_size: 1, + storage_size: 6, + updated_at: recent_size_enabled_at - 1.month + ) + + actual = migration.filter_batch(project_statistics_table).pluck(:id) + + expect(actual).to match_array(expected) + end + + shared_examples 'filters out project_statistics updated since recent objects went live' do + it 'filters out project_statistics updated since recent objects went live' do + project_statistics_table.create!( + project_id: proj5.id, + namespace_id: proj5.namespace_id, + repository_size: 10, + wiki_size: 1, + lfs_objects_size: 1, + build_artifacts_size: 1, + packages_size: 1, + snippets_size: 1, + uploads_size: 1, + storage_size: 6, + updated_at: recent_size_enabled_at + 1.month + ) + + actual = migration.filter_batch(project_statistics_table).pluck(:id) + + expect(actual).to match_array(expected) + end + end + + context 'when on GitLab.com' do + before do + allow(Gitlab).to receive(:org_or_com?).and_return(true) + end + + it_behaves_like 'filters out project_statistics updated since recent objects went live' + end + + context 'when Gitlab.dev_or_test_env? 
is true ' do + before do + allow(Gitlab).to receive(:dev_or_test_env?).and_return(true) + end + + it_behaves_like 'filters out project_statistics updated since recent objects went live' + end + + context 'when on self-managed' do + before do + allow(Gitlab).to receive(:dev_or_test_env?).and_return(false) + allow(Gitlab).to receive(:org_or_com?).and_return(false) + end + + it 'does not filter out project_statistics updated since recent objects went live' do + latest = project_statistics_table.create!( + project_id: proj5.id, + namespace_id: proj5.namespace_id, + repository_size: 10, + wiki_size: 1, + lfs_objects_size: 1, + build_artifacts_size: 1, + packages_size: 1, + snippets_size: 1, + uploads_size: 1, + storage_size: 6, + updated_at: recent_size_enabled_at + 1.month + ) + + actual = migration.filter_batch(project_statistics_table).pluck(:id) + + expect(actual).to match_array(expected.push(latest.id)) + end + end + end + + describe '#perform' do + subject(:perform_migration) { migration.perform } + + before do + allow_next_instance_of(Repository) do |repo| + allow(repo).to receive(:recent_objects_size).and_return(10) + end + end + + context 'when project_statistics backfill runs' do + before do + generate_records(default_projects, project_statistics_table, default_stats) + allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async) + end + + it 'uses repository#recent_objects_size for repository_size' do + project_statistics = create_project_stats(projects, namespaces, default_stats) + migration = create_migration(end_id: project_statistics.project_id) + + migration.perform + + project_statistics.reload + expect(project_statistics.storage_size).to eq(6 + 10.megabytes) + end + end + + it 'coerces a null wiki_size to 0' do + project_statistics = create_project_stats(projects, namespaces, default_stats, { wiki_size: nil }) + allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async) + migration = create_migration(end_id: project_statistics.project_id) + + migration.perform + + project_statistics.reload + expect(project_statistics.storage_size).to eq(5 + 10.megabytes) + end + + it 'coerces a null snippets_size to 0' do + project_statistics = create_project_stats(projects, namespaces, default_stats, { snippets_size: nil }) + allow(::Namespaces::ScheduleAggregationWorker).to receive(:perform_async) + migration = create_migration(end_id: project_statistics.project_id) + + migration.perform + + project_statistics.reload + expect(project_statistics.storage_size).to eq(5 + 10.megabytes) + end + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb index 9f76e4131b2..06b66b599ab 100644 --- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb @@ -250,7 +250,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat end context 'when user name is invalid' do - let(:user_name) { '.' } + let(:user_name) { ',' } let!(:snippet) { snippets.create!(id: 4, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) } let(:ids) { [4, 4] } @@ -262,7 +262,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat end context 'when both user name and snippet file_name are invalid' do - let(:user_name) { '.' 
} + let(:user_name) { ',' } let!(:other_user) do users.create!( id: 2, diff --git a/spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb b/spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb new file mode 100644 index 00000000000..b66b930b7ac --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_user_preferences_with_defaults_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillUserPreferencesWithDefaults, + schema: 20230818085219, + feature_category: :user_profile do + let(:user_preferences) { table(:user_preferences) } + let(:users) { table(:users) } + let(:columns) { [:tab_width, :time_display_relative, :render_whitespace_in_code] } + let(:initial_column_values) do + [ + [nil, nil, nil], + [10, nil, nil], + [nil, false, nil], + [nil, nil, true] + ] + .map { |row| columns.zip(row).to_h } + end + + let(:final_column_values) do + [ + [8, true, false], + [10, true, false], + [8, false, false], + [8, true, true] + ] + .map { |row| columns.zip(row).to_h } + end + + subject(:perform_migration) do + described_class + .new( + start_id: user_preferences.minimum(:id), + end_id: user_preferences.maximum(:id), + batch_table: :user_preferences, + batch_column: :id, + sub_batch_size: 2, + pause_ms: 0, + connection: ActiveRecord::Base.connection + ) + .perform + end + + before do + initial_column_values.each_with_index do |attributes, index| + user = users.create!(projects_limit: 1, email: "user#{index}@gitlab.com") + user_preference = user_preferences.create!(attributes.merge(user_id: user.id)) + final_column_values[index].merge!(id: user_preference.id) + end + end + + it 'backfills the null values with the default values' do + perform_migration + + final_column_values.each { |attributes| match_attributes(attributes) } + end + + def match_attributes(attributes) + migrated_user_preference = user_preferences.find(attributes[:id]) + + expect(migrated_user_preference.tab_width).to eq(attributes[:tab_width]) + expect(migrated_user_preference.time_display_relative).to eq(attributes[:time_display_relative]) + expect(migrated_user_preference.render_whitespace_in_code).to eq(attributes[:render_whitespace_in_code]) + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb b/spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb new file mode 100644 index 00000000000..78f36933435 --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_users_with_defaults_spec.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillUsersWithDefaults, + schema: 20230818083610, + feature_category: :user_profile do + let(:users) { table(:users) } + let(:columns) { [:project_view, :hide_no_ssh_key, :hide_no_password, :notified_of_own_activity] } + let(:initial_column_values) do + [ + [nil, nil, nil, nil], + [0, nil, nil, nil], + [nil, true, nil, nil], + [nil, nil, true, nil], + [nil, nil, nil, true] + ] + .map { |row| columns.zip(row).to_h } + end + + let(:final_column_values) do + [ + [2, false, false, false], + [0, false, false, false], + [2, true, false, false], + [2, false, true, false], + [2, false, false, true] + ] + .map { |row| columns.zip(row).to_h } + end + + subject(:perform_migration) do + described_class + .new( + start_id: users.minimum(:id), + end_id: users.maximum(:id), + batch_table: :users, + batch_column: :id, + 
sub_batch_size: 2, + pause_ms: 0, + connection: ActiveRecord::Base.connection + ) + .perform + end + + before do + initial_column_values.each_with_index do |attributes, index| + user = users.create!(**attributes.merge(projects_limit: 1, email: "user#{index}@gitlab.com")) + final_column_values[index].merge!(id: user.id) + end + end + + it 'backfills the null values with the default values' do + perform_migration + + final_column_values.each { |attributes| match_attributes(attributes) } + end + + private + + def match_attributes(attributes) + migrated_user = users.find(attributes[:id]) + expect(migrated_user.project_view).to eq(attributes[:project_view]) + expect(migrated_user.hide_no_ssh_key).to eq(attributes[:hide_no_ssh_key]) + expect(migrated_user.hide_no_password).to eq(attributes[:hide_no_password]) + expect(migrated_user.notified_of_own_activity).to eq(attributes[:notified_of_own_activity]) + end +end diff --git a/spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb b/spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb new file mode 100644 index 00000000000..97f69afca55 --- /dev/null +++ b/spec/lib/gitlab/background_migration/convert_credit_card_validation_data_to_hashes_spec.rb @@ -0,0 +1,81 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::ConvertCreditCardValidationDataToHashes, schema: 20230821081603, feature_category: :user_profile do # rubocop:disable Layout/LineLength + let(:users_table) { table(:users) } + let(:credit_card_validations_table) { table(:user_credit_card_validations) } + let(:rows) { 5 } + + describe '#perform' do + let(:network) { 'Visa' } + let(:holder_name) { 'John Smith' } + let(:last_digits) { 1111 } + let(:expiration_date) { 1.year.from_now.to_date } + + subject(:perform_migration) do + described_class.new( + start_id: 1, + end_id: rows, + batch_table: :user_credit_card_validations, + batch_column: :user_id, + sub_batch_size: 2, + pause_ms: 0, + connection: ActiveRecord::Base.connection + ).perform + end + + before do + (1..rows).each do |i| + users_table.create!(id: i, username: "John #{i}", email: "johndoe_#{i}@gitlab.com", projects_limit: 10) + + credit_card_validations_table.create!( + id: i, + user_id: i, + network: network, + holder_name: holder_name, + last_digits: last_digits, + expiration_date: expiration_date, + credit_card_validated_at: Date.today + ) + end + end + + it 'updates values to hash for records in the specified batch', :aggregate_failures do + perform_migration + + (1..rows).each do |i| + credit_card = credit_card_validations_table.find_by(user_id: i) + + expect(credit_card.last_digits_hash).to eq(hashed_value(last_digits)) + expect(credit_card.holder_name_hash).to eq(hashed_value(holder_name.downcase)) + expect(credit_card.network_hash).to eq(hashed_value(network.downcase)) + expect(credit_card.expiration_date_hash).to eq(hashed_value(expiration_date.to_s)) + end + end + + context 'with NULL columns' do + let(:network) { nil } + let(:holder_name) { nil } + let(:last_digits) { nil } + let(:expiration_date) { nil } + + it 'does not update values for records in the specified batch', :aggregate_failures do + perform_migration + + (1..rows).each do |i| + credit_card = credit_card_validations_table.find_by(user_id: i) + + expect(credit_card.last_digits_hash).to eq(nil) + expect(credit_card.holder_name_hash).to eq(nil) + expect(credit_card.network_hash).to eq(nil) + expect(credit_card.expiration_date_hash).to 
eq(nil) + end + end + end + end + + def hashed_value(value) + Gitlab::CryptoHelper.sha256(value) + end +end diff --git a/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb b/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb deleted file mode 100644 index 195e57e4e59..00000000000 --- a/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::RebalancePartitionId, - :migration, - schema: 20230125093723, - feature_category: :continuous_integration do - let(:ci_builds_table) { table(:ci_builds, database: :ci) } - let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) } - - let!(:valid_ci_pipeline) { ci_pipelines_table.create!(id: 1, partition_id: 100) } - let!(:invalid_ci_pipeline) { ci_pipelines_table.create!(id: 2, partition_id: 101) } - - describe '#perform' do - using RSpec::Parameterized::TableSyntax - - where(:table_name, :invalid_record, :valid_record) do - :ci_pipelines | invalid_ci_pipeline | valid_ci_pipeline - end - - subject(:perform) do - described_class.new( - start_id: 1, - end_id: 2, - batch_table: table_name, - batch_column: :id, - sub_batch_size: 1, - pause_ms: 0, - connection: Ci::ApplicationRecord.connection - ).perform - end - - shared_examples 'fix invalid records' do - it 'rebalances partition_id to 100 when partition_id is 101' do - expect { perform } - .to change { invalid_record.reload.partition_id }.from(101).to(100) - .and not_change { valid_record.reload.partition_id } - end - end - - with_them do - it_behaves_like 'fix invalid records' - end - end -end diff --git a/spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb b/spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb new file mode 100644 index 00000000000..19ad70337dc --- /dev/null +++ b/spec/lib/gitlab/background_migration/update_users_set_external_if_service_account_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::UpdateUsersSetExternalIfServiceAccount, feature_category: :system_access do + describe "#perform" do + let(:users_table) { table(:users) } + let(:service_account_user) do + users_table.create!(username: 'john_doe', email: 'johndoe@gitlab.com', + user_type: HasUserType::USER_TYPES[:service_account], projects_limit: 5) + end + + let(:service_user) do + users_table.create!(username: 'john_doe2', email: 'johndoe2@gitlab.com', + user_type: HasUserType::USER_TYPES[:service_user], projects_limit: 5) + end + + let(:table_name) { :users } + let(:batch_column) { :id } + let(:sub_batch_size) { 2 } + let(:pause_ms) { 0 } + let(:migration) do + described_class.new( + start_id: service_account_user.id, end_id: service_user.id, + batch_table: table_name, batch_column: batch_column, + sub_batch_size: sub_batch_size, pause_ms: pause_ms, + connection: ApplicationRecord.connection + ) + end + + subject(:perform_migration) do + migration.perform + end + + it "changes external field for service_account user" do + perform_migration + + expect(service_account_user.reload.external).to eq(true) + expect(service_user.reload.external).to eq(false) + end + end +end diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb index 4c94ecfe745..9786e7a364e 100644 --- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb +++ 
b/spec/lib/gitlab/bitbucket_import/importer_spec.rb @@ -92,6 +92,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea describe '#import_pull_requests' do let(:source_branch_sha) { sample.commits.last } + let(:merge_commit_sha) { sample.commits.second } let(:target_branch_sha) { sample.commits.first } let(:pull_request) do instance_double( @@ -101,6 +102,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch, target_branch_sha: target_branch_sha, target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch, + merge_commit_sha: merge_commit_sha, title: 'This is a title', description: 'This is a test pull request', state: 'merged', @@ -217,17 +219,29 @@ RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, fea end end - context "when branches' sha is not found in the repository" do + context 'when source SHA is not found in the repository' do let(:source_branch_sha) { 'a' * Commit::MIN_SHA_LENGTH } - let(:target_branch_sha) { 'b' * Commit::MIN_SHA_LENGTH } + let(:target_branch_sha) { 'c' * Commit::MIN_SHA_LENGTH } - it 'uses the pull request sha references' do + it 'uses merge commit SHA for source' do expect { subject.execute }.to change { MergeRequest.count }.by(1) merge_request_diff = MergeRequest.first.merge_request_diff - expect(merge_request_diff.head_commit_sha).to eq source_branch_sha + expect(merge_request_diff.head_commit_sha).to eq merge_commit_sha expect(merge_request_diff.start_commit_sha).to eq target_branch_sha end + + context 'when the merge commit SHA is also not found' do + let(:merge_commit_sha) { 'b' * Commit::MIN_SHA_LENGTH } + + it 'uses the pull request sha references' do + expect { subject.execute }.to change { MergeRequest.count }.by(1) + + merge_request_diff = MergeRequest.first.merge_request_diff + expect(merge_request_diff.head_commit_sha).to eq source_branch_sha + expect(merge_request_diff.start_commit_sha).to eq target_branch_sha + end + end end context "when target_branch_sha is blank" do diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb new file mode 100644 index 00000000000..2eca6bb47d6 --- /dev/null +++ b/spec/lib/gitlab/bitbucket_import/importers/pull_request_importer_spec.rb @@ -0,0 +1,166 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestImporter, :clean_gitlab_redis_cache, feature_category: :importers do + include AfterNextHelpers + + let_it_be(:project) { create(:project, :repository) } + let_it_be(:bitbucket_user) { create(:user) } + let_it_be(:user_2) { create(:user) } + let_it_be(:user_3) { create(:user) } + let_it_be(:identity) { create(:identity, user: bitbucket_user, extern_uid: 'bitbucket_user', provider: :bitbucket) } + let_it_be(:identity_2) { create(:identity, user: user_2, extern_uid: 'user_2', provider: :bitbucket) } + let(:source_branch_sha) { project.repository.commit.sha } + let(:target_branch_sha) { project.repository.commit('refs/heads/master').sha } + + let(:hash) do + { + author: 'bitbucket_user', + created_at: Date.today, + description: 'description', + iid: 11, + source_branch_name: 'source-branch-name', + source_branch_sha: source_branch_sha, + state: 'merged', + target_branch_name: 'destination-branch-name', + target_branch_sha: target_branch_sha, + title: 'title', + updated_at: 
Date.today, + reviewers: %w[user_2 user_3] + } + end + + subject(:importer) { described_class.new(project, hash) } + + describe '#execute' do + it 'calls MergeRequestCreator' do + expect(Gitlab::Import::MergeRequestCreator).to receive_message_chain(:new, :execute) + + importer.execute + end + + it 'creates a merge request with the correct attributes' do + expect { importer.execute }.to change { project.merge_requests.count }.from(0).to(1) + + merge_request = project.merge_requests.first + + expect(merge_request.iid).to eq(11) + expect(merge_request.author).to eq(bitbucket_user) + expect(merge_request.title).to eq('title') + expect(merge_request.merged?).to be_truthy + expect(merge_request.created_at).to eq(Date.today) + expect(merge_request.description).to eq('description') + expect(merge_request.source_project_id).to eq(project.id) + expect(merge_request.target_project_id).to eq(project.id) + expect(merge_request.source_branch).to eq('source-branch-name') + expect(merge_request.target_branch).to eq('destination-branch-name') + expect(merge_request.assignee_ids).to eq([bitbucket_user.id]) + expect(merge_request.reviewer_ids).to eq([user_2.id]) + expect(merge_request.merge_request_diffs.first.base_commit_sha).to eq(source_branch_sha) + expect(merge_request.merge_request_diffs.first.head_commit_sha).to eq(target_branch_sha) + end + + context 'when the state is closed' do + it 'marks merge request as closed' do + described_class.new(project, hash.merge(state: 'closed')).execute + + expect(project.merge_requests.first.closed?).to be_truthy + end + end + + context 'when the state is opened' do + it 'marks merge request as opened' do + described_class.new(project, hash.merge(state: 'opened')).execute + + expect(project.merge_requests.first.opened?).to be_truthy + end + end + + context 'when the author does not have a bitbucket identity' do + before do + identity.update!(provider: :github) + end + + it 'sets the author and assignee to the project creator and adds the author to the description' do + importer.execute + + merge_request = project.merge_requests.first + + expect(merge_request.author).to eq(project.creator) + expect(merge_request.assignee).to eq(project.creator) + expect(merge_request.description).to eq("*Created by: bitbucket_user*\n\ndescription") + end + end + + context 'when none of the reviewers have an identity' do + before do + identity_2.destroy! 
+ end + + it 'does not set reviewer_ids' do + importer.execute + + merge_request = project.merge_requests.first + + expect(merge_request.reviewer_ids).to be_empty + end + end + + describe 'head_commit_sha for merge request diff' do + let(:diff) { project.merge_requests.first.merge_request_diffs.first } + let(:min_length) { Commit::MIN_SHA_LENGTH } + + context 'when the source commit hash from Bitbucket is found on the repo' do + it 'is set to the source commit hash' do + described_class.new(project, hash.merge(source_branch_sha: source_branch_sha)).execute + + expect(diff.head_commit_sha).to eq(source_branch_sha) + end + end + + context 'when the source commit hash is not found but the merge commit hash is found' do + it 'is set to the merge commit hash' do + attrs = { source_branch_sha: 'x' * min_length, merge_commit_sha: source_branch_sha } + + described_class.new(project, hash.merge(attrs)).execute + + expect(diff.head_commit_sha).to eq(source_branch_sha) + end + end + + context 'when both the source commit and merge commit hash are not found' do + it 'is nil' do + attrs = { source_branch_sha: 'x' * min_length, merge_commit_sha: 'y' * min_length } + + described_class.new(project, hash.merge(attrs)).execute + + expect(diff.head_commit_sha).to be_nil + end + end + end + + context 'when an error is raised' do + before do + allow(Gitlab::Import::MergeRequestCreator).to receive(:new).and_raise(StandardError) + end + + it 'tracks the failure and does not fail' do + expect(Gitlab::Import::ImportFailureService).to receive(:track).once + + importer.execute + end + end + + it 'logs its progress' do + allow(Gitlab::Import::MergeRequestCreator).to receive_message_chain(:new, :execute) + + expect(Gitlab::BitbucketImport::Logger) + .to receive(:info).with(include(message: 'starting', iid: anything)).and_call_original + expect(Gitlab::BitbucketImport::Logger) + .to receive(:info).with(include(message: 'finished', iid: anything)).and_call_original + + importer.execute + end + end +end diff --git a/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb new file mode 100644 index 00000000000..46bf099de0c --- /dev/null +++ b/spec/lib/gitlab/bitbucket_import/importers/pull_requests_importer_spec.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, feature_category: :importers do + let_it_be(:project) do + create(:project, :import_started, + import_data_attributes: { + data: { 'project_key' => 'key', 'repo_slug' => 'slug' }, + credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' } + } + ) + end + + subject(:importer) { described_class.new(project) } + + describe '#execute', :clean_gitlab_redis_cache do + before do + allow_next_instance_of(Bitbucket::Client) do |client| + allow(client).to receive(:pull_requests).and_return( + [ + Bitbucket::Representation::PullRequest.new({ 'id' => 1, 'state' => 'OPENED' }), + Bitbucket::Representation::PullRequest.new({ 'id' => 2, 'state' => 'DECLINED' }), + Bitbucket::Representation::PullRequest.new({ 'id' => 3, 'state' => 'MERGED' }) + ], + [] + ) + end + end + + it 'imports each pull request in parallel', :aggregate_failures do + expect(Gitlab::BitbucketImport::ImportPullRequestWorker).to receive(:perform_in).exactly(3).times + + waiter = importer.execute + + expect(waiter).to be_an_instance_of(Gitlab::JobWaiter) + 
expect(waiter.jobs_remaining).to eq(3) + expect(Gitlab::Cache::Import::Caching.values_from_set(importer.already_enqueued_cache_key)) + .to match_array(%w[1 2 3]) + end + + context 'when the client raises an error' do + before do + allow_next_instance_of(Bitbucket::Client) do |client| + allow(client).to receive(:pull_requests).and_raise(StandardError) + end + end + + it 'tracks the failure and does not fail' do + expect(Gitlab::Import::ImportFailureService).to receive(:track).once + + importer.execute + end + end + + context 'when pull request was already enqueued' do + before do + Gitlab::Cache::Import::Caching.set_add(importer.already_enqueued_cache_key, 1) + end + + it 'does not schedule job for enqueued pull requests', :aggregate_failures do + expect(Gitlab::BitbucketImport::ImportPullRequestWorker).to receive(:perform_in).twice + + waiter = importer.execute + + expect(waiter).to be_an_instance_of(Gitlab::JobWaiter) + expect(waiter.jobs_remaining).to eq(3) + end + end + end +end diff --git a/spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb new file mode 100644 index 00000000000..1caf0b884c2 --- /dev/null +++ b/spec/lib/gitlab/bitbucket_import/importers/repository_importer_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BitbucketImport::Importers::RepositoryImporter, feature_category: :importers do + let_it_be(:project) { create(:project, import_url: 'https://bitbucket.org/vim/vim.git') } + + subject(:importer) { described_class.new(project) } + + describe '#execute' do + context 'when repository is empty' do + it 'imports the repository' do + expect(project.repository).to receive(:import_repository).with(project.import_url) + expect(project.repository).to receive(:fetch_as_mirror).with(project.import_url, + refmap: ['+refs/pull-requests/*/to:refs/merge-requests/*/head']) + expect(project.last_repository_updated_at).to be_present + + importer.execute + end + end + + context 'when repository is not empty' do + before do + allow(project).to receive(:empty_repo?).and_return(false) + + project.last_repository_updated_at = 1.day.ago + end + + it 'does not import the repository' do + expect(project.repository).not_to receive(:import_repository) + + expect { importer.execute }.not_to change { project.last_repository_updated_at } + end + end + + context 'when a Git CommandError is raised and the repository exists' do + before do + allow(project.repository).to receive(:import_repository).and_raise(::Gitlab::Git::CommandError) + allow(project).to receive(:repository_exists?).and_return(true) + end + + it 'expires repository caches' do + expect(project.repository).to receive(:expire_content_cache) + + expect { importer.execute }.to raise_error(::Gitlab::Git::CommandError) + end + end + end +end diff --git a/spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb b/spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb new file mode 100644 index 00000000000..29919c43d23 --- /dev/null +++ b/spec/lib/gitlab/bitbucket_import/parallel_importer_spec.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BitbucketImport::ParallelImporter, feature_category: :importers do + subject { described_class } + + it { is_expected.to be_async } + + describe '.track_start_import' do + it 'tracks the start of import' do + project = build_stubbed(:project) + + expect_next_instance_of(Gitlab::Import::Metrics, 
:bitbucket_importer, project) do |metric| + expect(metric).to receive(:track_start_import) + end + + subject.track_start_import(project) + end + end + + describe '#execute', :clean_gitlab_redis_shared_state do + let_it_be(:project) { create(:project) } + let(:importer) { subject.new(project) } + + before do + create(:import_state, :started, project: project) + end + + it 'schedules the importing of the repository' do + expect(Gitlab::BitbucketImport::Stage::ImportRepositoryWorker) + .to receive_message_chain(:with_status, :perform_async).with(project.id) + + expect(importer.execute).to eq(true) + end + + it 'sets the JID in Redis' do + expect(Gitlab::Import::SetAsyncJid).to receive(:set_jid).with(project.import_state).and_call_original + + importer.execute + end + end +end diff --git a/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb b/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb new file mode 100644 index 00000000000..4ac4c2e4813 --- /dev/null +++ b/spec/lib/gitlab/bitbucket_import/user_finder_spec.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BitbucketImport::UserFinder, :clean_gitlab_redis_cache, feature_category: :importers do + let_it_be(:user) { create(:user) } + let_it_be(:identity) { create(:identity, user: user, extern_uid: 'uid', provider: :bitbucket) } + let(:created_id) { 1 } + let(:project) { instance_double(Project, creator_id: created_id, id: 1) } + let(:author) { 'uid' } + let(:cache_key) { format(described_class::USER_ID_FOR_AUTHOR_CACHE_KEY, project_id: project.id, author: author) } + + subject(:user_finder) { described_class.new(project) } + + describe '#find_user_id' do + it 'returns the user id' do + expect(User).to receive(:by_provider_and_extern_uid).and_call_original.once + + expect(user_finder.find_user_id(author)).to eq(user.id) + expect(user_finder.find_user_id(author)).to eq(user.id) + end + + context 'when the id is cached' do + before do + Gitlab::Cache::Import::Caching.write(cache_key, user.id) + end + + it 'does not attempt to find the user' do + expect(User).not_to receive(:by_provider_and_extern_uid) + + expect(user_finder.find_user_id(author)).to eq(user.id) + end + end + + context 'when -1 is cached' do + before do + Gitlab::Cache::Import::Caching.write(cache_key, -1) + end + + it 'does not attempt to find the user and returns nil' do + expect(User).not_to receive(:by_provider_and_extern_uid) + + expect(user_finder.find_user_id(author)).to be_nil + end + end + + context 'when the user does not have a matching bitbucket identity' do + before do + identity.update!(provider: :github) + end + + it 'returns nil' do + expect(user_finder.find_user_id(author)).to be_nil + end + end + end + + describe '#gitlab_user_id' do + context 'when find_user_id returns a user' do + it 'returns the user id' do + expect(user_finder.gitlab_user_id(project, author)).to eq(user.id) + end + end + + context 'when find_user_id does not return a user' do + before do + allow(user_finder).to receive(:find_user_id).and_return(nil) + end + + it 'returns the project creator' do + expect(user_finder.gitlab_user_id(project, author)).to eq(created_id) + end + end + end +end diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb deleted file mode 100644 index 4ff61bf329c..00000000000 --- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb +++ /dev/null @@ -1,653 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 
Gitlab::BitbucketServerImport::Importer, feature_category: :importers do - include ImportSpecHelper - - let(:import_url) { 'http://my-bitbucket' } - let(:bitbucket_user) { 'bitbucket' } - let(:project_creator) { create(:user, username: 'project_creator', email: 'project_creator@example.org') } - let(:password) { 'test' } - let(:project) { create(:project, :repository, import_url: import_url, creator: project_creator) } - let(:now) { Time.now.utc.change(usec: 0) } - let(:project_key) { 'TEST' } - let(:repo_slug) { 'rouge-repo' } - let(:sample) { RepoHelpers.sample_compare } - - subject { described_class.new(project, recover_missing_commits: true) } - - before do - data = project.create_or_update_import_data( - data: { project_key: project_key, repo_slug: repo_slug }, - credentials: { base_uri: import_url, user: bitbucket_user, password: password } - ) - data.save! - project.save! - end - - describe '#import_repository' do - let(:repo_url) { 'http://bitbucket:test@my-bitbucket' } - - before do - expect(project.repository).to receive(:import_repository).with(repo_url) - end - - it 'adds a remote' do - expect(subject).to receive(:import_pull_requests) - expect(subject).to receive(:delete_temp_branches) - expect(project.repository).to receive(:fetch_as_mirror) - .with(repo_url, - refmap: ['+refs/pull-requests/*/to:refs/merge-requests/*/head']) - - subject.execute - end - - it 'raises a Gitlab::Git::CommandError in the fetch' do - expect(project.repository).to receive(:fetch_as_mirror).and_raise(::Gitlab::Git::CommandError) - - expect { subject.execute }.to raise_error(::Gitlab::Git::CommandError) - end - - it 'raises an unhandled exception in the fetch' do - expect(project.repository).to receive(:fetch_as_mirror).and_raise(RuntimeError) - - expect { subject.execute }.to raise_error(RuntimeError) - end - end - - describe '#import_pull_requests' do - let(:pull_request_author) { create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org') } - let(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') } - - let(:pull_request) do - instance_double( - BitbucketServer::Representation::PullRequest, - iid: 10, - source_branch_sha: sample.commits.last, - source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch, - target_branch_sha: sample.commits.first, - target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch, - title: 'This is a title', - description: 'This is a test pull request', - reviewers: [], - state: 'merged', - author: 'Test Author', - author_email: pull_request_author.email, - author_username: pull_request_author.username, - created_at: Time.now, - updated_at: Time.now, - raw: {}, - merged?: true) - end - - let(:merge_event) do - instance_double( - BitbucketServer::Representation::Activity, - comment?: false, - merge_event?: true, - committer_email: pull_request_author.email, - merge_timestamp: now, - merge_commit: '12345678' - ) - end - - let(:pr_note) do - instance_double( - BitbucketServer::Representation::Comment, - note: 'Hello world', - author_email: note_author.email, - author_username: note_author.username, - comments: [], - created_at: now, - updated_at: now, - parent_comment: nil) - end - - let(:pr_comment) do - instance_double( - BitbucketServer::Representation::Activity, - comment?: true, - inline_comment?: false, - merge_event?: false, - comment: pr_note) - end - - before do - allow(subject).to receive(:import_repository) - allow(subject).to receive(:delete_temp_branches) - 
allow(subject).to receive(:restore_branches) - - allow(subject.client).to receive(:pull_requests).and_return([pull_request], []) - end - - # As we are using Caching with redis, it is best to clean the cache after each test run, else we need to wait for - # the expiration by the importer - after do - Gitlab::Cache::Import::Caching.expire(subject.already_imported_cache_key, 0) - end - - it 'imports merge event' do - expect(subject.client).to receive(:activities).and_return([merge_event]) - - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.metrics.merged_by).to eq(pull_request_author) - expect(merge_request.metrics.merged_at).to eq(merge_event.merge_timestamp) - expect(merge_request.merge_commit_sha).to eq('12345678') - expect(merge_request.state_id).to eq(3) - end - - describe 'pull request author user mapping' do - before do - allow(subject.client).to receive(:activities).and_return([merge_event]) - end - - shared_examples 'imports pull requests' do - it 'maps user' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.author).to eq(expected_author) - end - end - - context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do - before do - stub_feature_flags(bitbucket_server_user_mapping_by_username: false) - end - - context 'when email is not present' do - before do - allow(pull_request).to receive(:author_email).and_return(nil) - end - - let(:expected_author) { project_creator } - - include_examples 'imports pull requests' - end - - context 'when email is present' do - before do - allow(pull_request).to receive(:author_email).and_return(pull_request_author.email) - end - - let(:expected_author) { pull_request_author } - - include_examples 'imports pull requests' - end - end - - context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do - before do - stub_feature_flags(bitbucket_server_user_mapping_by_username: true) - end - - context 'when username is not present' do - before do - allow(pull_request).to receive(:author_username).and_return(nil) - end - - let(:expected_author) { project_creator } - - include_examples 'imports pull requests' - end - - context 'when username is present' do - before do - allow(pull_request).to receive(:author_username).and_return(pull_request_author.username) - end - - let(:expected_author) { pull_request_author } - - include_examples 'imports pull requests' - end - end - - context 'when user is not found' do - before do - allow(pull_request).to receive(:author_username).and_return(nil) - allow(pull_request).to receive(:author_email).and_return(nil) - end - - it 'maps importer user' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.author).to eq(project_creator) - end - end - end - - describe 'comments' do - shared_examples 'imports comments' do - it 'imports comments' do - expect(subject.client).to receive(:activities).and_return([pr_comment]) - - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.notes.count).to eq(1) - note = merge_request.notes.first - expect(note.note).to end_with(pr_note.note) - expect(note.author).to eq(note_author) - expect(note.created_at).to eq(pr_note.created_at) - expect(note.updated_at).to eq(pr_note.created_at) - end - end - - context 'when 
bitbucket_server_user_mapping_by_username feature flag is disabled' do - before do - stub_feature_flags(bitbucket_server_user_mapping_by_username: false) - end - - include_examples 'imports comments' - end - - context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do - before do - stub_feature_flags(bitbucket_server_user_mapping_by_username: true) - end - - include_examples 'imports comments' - - context 'when username is not present' do - before do - allow(pr_note).to receive(:author_username).and_return(nil) - allow(subject.client).to receive(:activities).and_return([pr_comment]) - end - - it 'defaults to import user' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.notes.count).to eq(1) - note = merge_request.notes.first - expect(note.author).to eq(project_creator) - end - end - - context 'when username is present' do - before do - allow(pr_note).to receive(:author_username).and_return(note_author.username) - allow(subject.client).to receive(:activities).and_return([pr_comment]) - end - - it 'maps by username' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.notes.count).to eq(1) - note = merge_request.notes.first - expect(note.author).to eq(note_author) - end - end - end - end - - context 'metrics' do - let(:histogram) { double(:histogram).as_null_object } - let(:counter) { double('counter', increment: true) } - - before do - allow(Gitlab::Metrics).to receive(:counter) { counter } - allow(Gitlab::Metrics).to receive(:histogram) { histogram } - allow(subject.client).to receive(:activities).and_return([merge_event]) - end - - it 'counts and measures duration of imported projects' do - expect(Gitlab::Metrics).to receive(:counter).with( - :bitbucket_server_importer_imported_projects_total, - 'The number of imported projects' - ) - - expect(Gitlab::Metrics).to receive(:histogram).with( - :bitbucket_server_importer_total_duration_seconds, - 'Total time spent importing projects, in seconds', - {}, - Gitlab::Import::Metrics::IMPORT_DURATION_BUCKETS - ) - - expect(counter).to receive(:increment) - expect(histogram).to receive(:observe).with({ importer: :bitbucket_server_importer }, anything) - - subject.execute - end - - it 'counts imported pull requests' do - expect(Gitlab::Metrics).to receive(:counter).with( - :bitbucket_server_importer_imported_merge_requests_total, - 'The number of imported merge (pull) requests' - ) - - expect(counter).to receive(:increment) - - subject.execute - end - end - - describe 'threaded discussions' do - let(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') } - let(:inline_note_author) { create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org') } - - let(:reply) do - instance_double( - BitbucketServer::Representation::PullRequestComment, - author_email: reply_author.email, - author_username: reply_author.username, - note: 'I agree', - created_at: now, - updated_at: now) - end - - # https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad - let(:inline_note) do - instance_double( - BitbucketServer::Representation::PullRequestComment, - file_type: 'ADDED', - from_sha: sample.commits.first, - to_sha: sample.commits.last, - file_path: '.gitmodules', - old_pos: nil, - new_pos: 4, - note: 'Hello world', - author_email: 
inline_note_author.email, - author_username: inline_note_author.username, - comments: [reply], - created_at: now, - updated_at: now, - parent_comment: nil) - end - - let(:inline_comment) do - instance_double( - BitbucketServer::Representation::Activity, - comment?: true, - inline_comment?: true, - merge_event?: false, - comment: inline_note) - end - - before do - allow(reply).to receive(:parent_comment).and_return(inline_note) - allow(subject.client).to receive(:activities).and_return([inline_comment]) - end - - shared_examples 'imports threaded discussions' do - it 'imports threaded discussions' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.notes.count).to eq(2) - expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1) - - notes = merge_request.notes.order(:id).to_a - start_note = notes.first - expect(start_note.type).to eq('DiffNote') - expect(start_note.note).to end_with(inline_note.note) - expect(start_note.created_at).to eq(inline_note.created_at) - expect(start_note.updated_at).to eq(inline_note.updated_at) - expect(start_note.position.base_sha).to eq(inline_note.from_sha) - expect(start_note.position.start_sha).to eq(inline_note.from_sha) - expect(start_note.position.head_sha).to eq(inline_note.to_sha) - expect(start_note.position.old_line).to be_nil - expect(start_note.position.new_line).to eq(inline_note.new_pos) - expect(start_note.author).to eq(inline_note_author) - - reply_note = notes.last - # Make sure author and reply context is included - expect(reply_note.note).to start_with("> #{inline_note.note}\n\n#{reply.note}") - expect(reply_note.author).to eq(reply_author) - expect(reply_note.created_at).to eq(reply.created_at) - expect(reply_note.updated_at).to eq(reply.created_at) - expect(reply_note.position.base_sha).to eq(inline_note.from_sha) - expect(reply_note.position.start_sha).to eq(inline_note.from_sha) - expect(reply_note.position.head_sha).to eq(inline_note.to_sha) - expect(reply_note.position.old_line).to be_nil - expect(reply_note.position.new_line).to eq(inline_note.new_pos) - end - end - - context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do - before do - stub_feature_flags(bitbucket_server_user_mapping_by_username: false) - end - - include_examples 'imports threaded discussions' - end - - context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do - before do - stub_feature_flags(bitbucket_server_user_mapping_by_username: true) - end - - include_examples 'imports threaded discussions' do - context 'when username is not present' do - before do - allow(reply).to receive(:author_username).and_return(nil) - allow(inline_note).to receive(:author_username).and_return(nil) - end - - it 'defaults to import user' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - notes = MergeRequest.first.notes.order(:id).to_a - - expect(notes.first.author).to eq(project_creator) - expect(notes.last.author).to eq(project_creator) - end - end - end - end - - context 'when user is not found' do - before do - allow(reply).to receive(:author_username).and_return(nil) - allow(reply).to receive(:author_email).and_return(nil) - allow(inline_note).to receive(:author_username).and_return(nil) - allow(inline_note).to receive(:author_email).and_return(nil) - end - - it 'maps importer user' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - notes = MergeRequest.first.notes.order(:id).to_a - - 
expect(notes.first.author).to eq(project_creator) - expect(notes.last.author).to eq(project_creator) - end - end - end - - it 'falls back to comments if diff comments fail to validate' do - reply = instance_double( - BitbucketServer::Representation::Comment, - author_email: 'someuser@gitlab.com', - author_username: 'Aquaman', - note: 'I agree', - created_at: now, - updated_at: now) - - # https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad - inline_note = instance_double( - BitbucketServer::Representation::PullRequestComment, - file_type: 'REMOVED', - from_sha: sample.commits.first, - to_sha: sample.commits.last, - file_path: '.gitmodules', - old_pos: 8, - new_pos: 9, - note: 'This is a note with an invalid line position.', - author_email: project.owner.email, - author_username: 'Owner', - comments: [reply], - created_at: now, - updated_at: now, - parent_comment: nil) - - inline_comment = instance_double( - BitbucketServer::Representation::Activity, - comment?: true, - inline_comment?: true, - merge_event?: false, - comment: inline_note) - - allow(reply).to receive(:parent_comment).and_return(inline_note) - - expect(subject.client).to receive(:activities).and_return([inline_comment]) - - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - merge_request = MergeRequest.first - expect(merge_request.notes.count).to eq(2) - notes = merge_request.notes - - expect(notes.first.note).to start_with('*Comment on .gitmodules') - expect(notes.second.note).to start_with('*Comment on .gitmodules') - end - - it 'reports an error if an exception is raised' do - allow(subject).to receive(:import_bitbucket_pull_request).and_raise(RuntimeError) - expect(Gitlab::ErrorTracking).to receive(:log_exception) - - subject.execute - end - - describe 'import pull requests with caching' do - let(:pull_request_already_imported) do - instance_double( - BitbucketServer::Representation::PullRequest, - iid: 11) - end - - let(:pull_request_to_be_imported) do - instance_double( - BitbucketServer::Representation::PullRequest, - iid: 12, - source_branch_sha: sample.commits.last, - source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch, - target_branch_sha: sample.commits.first, - target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch, - title: 'This is a title', - description: 'This is a test pull request', - reviewers: sample.reviewers, - state: 'merged', - author: 'Test Author', - author_email: pull_request_author.email, - author_username: pull_request_author.username, - created_at: Time.now, - updated_at: Time.now, - raw: {}, - merged?: true) - end - - before do - Gitlab::Cache::Import::Caching.set_add(subject.already_imported_cache_key, pull_request_already_imported.iid) - allow(subject.client).to receive(:pull_requests).and_return([pull_request_to_be_imported, pull_request_already_imported], []) - end - - it 'only imports one Merge Request, as the other on is in the cache' do - expect(subject.client).to receive(:activities).and_return([merge_event]) - expect { subject.execute }.to change { MergeRequest.count }.by(1) - - expect(Gitlab::Cache::Import::Caching.set_includes?(subject.already_imported_cache_key, pull_request_already_imported.iid)).to eq(true) - expect(Gitlab::Cache::Import::Caching.set_includes?(subject.already_imported_cache_key, pull_request_to_be_imported.iid)).to eq(true) - end - end - end - - describe 'inaccessible branches' do - let(:id) { 10 } - let(:temp_branch_from) { 
"gitlab/import/pull-request/#{id}/from" } - let(:temp_branch_to) { "gitlab/import/pull-request/#{id}/to" } - - before do - pull_request = instance_double( - BitbucketServer::Representation::PullRequest, - iid: id, - source_branch_sha: '12345678', - source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch, - target_branch_sha: '98765432', - target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch, - title: 'This is a title', - description: 'This is a test pull request', - reviewers: [], - state: 'merged', - author: 'Test Author', - author_email: project.owner.email, - author_username: 'author', - created_at: Time.now, - updated_at: Time.now, - merged?: true) - - expect(subject.client).to receive(:pull_requests).and_return([pull_request], []) - expect(subject.client).to receive(:activities).and_return([]) - expect(subject).to receive(:import_repository).twice - end - - it '#restore_branches' do - expect(subject).to receive(:restore_branches).and_call_original - expect(subject).to receive(:delete_temp_branches) - expect(subject.client).to receive(:create_branch) - .with(project_key, repo_slug, - temp_branch_from, - '12345678') - expect(subject.client).to receive(:create_branch) - .with(project_key, repo_slug, - temp_branch_to, - '98765432') - - expect { subject.execute }.to change { MergeRequest.count }.by(1) - end - - it '#delete_temp_branches' do - expect(subject.client).to receive(:create_branch).twice - expect(subject).to receive(:delete_temp_branches).and_call_original - expect(subject.client).to receive(:delete_branch) - .with(project_key, repo_slug, - temp_branch_from, - '12345678') - expect(subject.client).to receive(:delete_branch) - .with(project_key, repo_slug, - temp_branch_to, - '98765432') - expect(project.repository).to receive(:delete_branch).with(temp_branch_from) - expect(project.repository).to receive(:delete_branch).with(temp_branch_to) - - expect { subject.execute }.to change { MergeRequest.count }.by(1) - end - end - - context "lfs files" do - before do - allow(project).to receive(:lfs_enabled?).and_return(true) - allow(subject).to receive(:import_repository) - allow(subject).to receive(:import_pull_requests) - end - - it "downloads lfs objects if lfs_enabled is enabled for project" do - expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |lfs_import_service| - expect(lfs_import_service).to receive(:execute).and_return(status: :success) - end - - subject.execute - end - - it "adds the error message when the lfs download fails" do - allow_next_instance_of(Projects::LfsPointers::LfsImportService) do |lfs_import_service| - expect(lfs_import_service).to receive(:execute).and_return(status: :error, message: "LFS server not reachable") - end - - subject.execute - - expect(project.import_state.reload.last_error).to eq(Gitlab::Json.dump({ - message: "The remote data could not be fully imported.", - errors: [{ - type: "lfs_objects", - errors: "The Lfs import process failed. 
LFS server not reachable" - }] - })) - end - end -end diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb index c65a1e4d656..5397aea90a9 100644 --- a/spec/lib/gitlab/checks/matching_merge_request_spec.rb +++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb @@ -31,33 +31,40 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do expect(matcher.match?).to be false end - context 'with load balancing enabled' do + context 'with load balancing enabled', :redis do let(:session) { ::Gitlab::Database::LoadBalancing::Session.current } - let(:all_caught_up) { true } before do + # Need to mock as though we actually have replicas + allow(::ApplicationRecord.load_balancer) + .to receive(:primary_only?) + .and_return(false) + + # Put some sticking position for the primary in Redis + ::ApplicationRecord.sticking.stick(:project, project.id) + Gitlab::Database::LoadBalancing::Session.clear_session - allow(::ApplicationRecord.sticking) - .to receive(:all_caught_up?) - .and_return(all_caught_up) + # Mock the load balancer result since we don't actually have real replicas to match against + expect(::ApplicationRecord.load_balancer) + .to receive(:select_up_to_date_host) + .and_return(load_balancer_result) + # Expect sticking called with correct arguments but don't mock it so that we can also test the internal + # behaviour of updating the Session.use_primary? expect(::ApplicationRecord.sticking) - .to receive(:select_valid_host) - .with(:project, project.id) + .to receive(:find_caught_up_replica) + .with(:project, project.id, use_primary_on_empty_location: true) .and_call_original - - allow(::ApplicationRecord.sticking) - .to receive(:select_caught_up_replicas) - .with(:project, project.id) - .and_return(all_caught_up) end after do Gitlab::Database::LoadBalancing::Session.clear_session end - shared_examples 'secondary that has caught up to a primary' do + context 'when any secondary is caught up' do + let(:load_balancer_result) { ::Gitlab::Database::LoadBalancing::LoadBalancer::ANY_CAUGHT_UP } + it 'continues to use the secondary' do expect(session.use_primary?).to be false expect(subject.match?).to be true @@ -70,7 +77,9 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do end end - shared_examples 'secondary that is lagging primary' do + context 'when all secondaries are lagging behind' do + let(:load_balancer_result) { ::Gitlab::Database::LoadBalancing::LoadBalancer::NONE_CAUGHT_UP } + it 'sticks to the primary' do expect(subject.match?).to be true expect(session.use_primary?).to be true @@ -82,14 +91,6 @@ RSpec.describe Gitlab::Checks::MatchingMergeRequest do .and change { stale_counter.get }.by(1) end end - - it_behaves_like 'secondary that has caught up to a primary' - - context 'on secondary behind primary' do - let(:all_caught_up) { false } - - it_behaves_like 'secondary that is lagging primary' - end end end end diff --git a/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb b/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb index efe99cd276c..1f3ba0ef76e 100644 --- a/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb +++ b/spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata do +RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata, feature_category: :build_artifacts do def metadata(path = '', **opts) described_class.new(metadata_file_stream, path, **opts) end @@ -19,132 +19,158 @@ RSpec.describe 
Gitlab::Ci::Build::Artifacts::Metadata do metadata_file_stream&.close end - context 'metadata file exists' do - describe '#find_entries! empty string' do - subject { metadata('').find_entries! } + describe '#to_entry' do + subject(:entry) { metadata.to_entry } - it 'matches correct paths' do - expect(subject.keys).to contain_exactly 'ci_artifacts.txt', - 'other_artifacts_0.1.2/', - 'rails_sample.jpg', - 'tests_encoding/' - end - - it 'matches metadata for every path' do - expect(subject.keys.count).to eq 4 - end + it { is_expected.to be_an_instance_of(Gitlab::Ci::Build::Artifacts::Metadata::Entry) } - it 'return Hashes for each metadata' do - expect(subject.values).to all(be_kind_of(Hash)) + context 'when given path starts with a ./ prefix' do + it 'instantiates the entry without the ./ prefix from the path' do + meta = metadata("./some/path") + expect(Gitlab::Ci::Build::Artifacts::Metadata::Entry).to receive(:new).with("some/path", {}) + meta.to_entry end end + end - describe '#find_entries! other_artifacts_0.1.2/' do - subject { metadata('other_artifacts_0.1.2/').find_entries! } + describe '#full_version' do + subject { metadata.full_version } - it 'matches correct paths' do - expect(subject.keys) - .to contain_exactly 'other_artifacts_0.1.2/', - 'other_artifacts_0.1.2/doc_sample.txt', - 'other_artifacts_0.1.2/another-subdirectory/' - end - end + it { is_expected.to eq 'GitLab Build Artifacts Metadata 0.0.1' } + end - describe '#find_entries! other_artifacts_0.1.2/another-subdirectory/' do - subject { metadata('other_artifacts_0.1.2/another-subdirectory/').find_entries! } + describe '#version' do + subject { metadata.version } - it 'matches correct paths' do - expect(subject.keys) - .to contain_exactly 'other_artifacts_0.1.2/another-subdirectory/', - 'other_artifacts_0.1.2/another-subdirectory/empty_directory/', - 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' - end - end + it { is_expected.to eq '0.0.1' } + end - describe '#find_entries! recursively for other_artifacts_0.1.2/' do - subject { metadata('other_artifacts_0.1.2/', recursive: true).find_entries! } + describe '#errors' do + subject { metadata.errors } - it 'matches correct paths' do - expect(subject.keys) - .to contain_exactly 'other_artifacts_0.1.2/', - 'other_artifacts_0.1.2/doc_sample.txt', - 'other_artifacts_0.1.2/another-subdirectory/', - 'other_artifacts_0.1.2/another-subdirectory/empty_directory/', - 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' - end - end + it { is_expected.to eq({}) } + end - describe '#to_entry' do - subject { metadata('').to_entry } + describe '#find_entries!' do + let(:recursive) { false } - it { is_expected.to be_an_instance_of(Gitlab::Ci::Build::Artifacts::Metadata::Entry) } - end + subject(:find_entries) { metadata(path, recursive: recursive).find_entries! 
} - describe '#full_version' do - subject { metadata('').full_version } + context 'when metadata file exists' do + context 'and given path is an empty string' do + let(:path) { '' } - it { is_expected.to eq 'GitLab Build Artifacts Metadata 0.0.1' } - end + it 'returns paths to all files and directories at the root level' do + expect(find_entries.keys).to contain_exactly( + 'ci_artifacts.txt', + 'other_artifacts_0.1.2/', + 'rails_sample.jpg', + 'tests_encoding/' + ) + end - describe '#version' do - subject { metadata('').version } + it 'return Hashes for each metadata' do + expect(find_entries.values).to all(be_kind_of(Hash)) + end + end - it { is_expected.to eq '0.0.1' } - end + shared_examples 'finding entries for a given path' do |options| + let(:path) { "#{options[:path_prefix]}#{target_path}" } + + context 'when given path targets a directory at the root level' do + let(:target_path) { 'other_artifacts_0.1.2/' } + + it 'returns paths to all files and directories at the first level of the directory' do + expect(find_entries.keys).to contain_exactly( + 'other_artifacts_0.1.2/', + 'other_artifacts_0.1.2/doc_sample.txt', + 'other_artifacts_0.1.2/another-subdirectory/' + ) + end + end + + context 'when given path targets a sub-directory' do + let(:target_path) { 'other_artifacts_0.1.2/another-subdirectory/' } + + it 'returns paths to all files and directories at the first level of the sub-directory' do + expect(find_entries.keys).to contain_exactly( + 'other_artifacts_0.1.2/another-subdirectory/', + 'other_artifacts_0.1.2/another-subdirectory/empty_directory/', + 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' + ) + end + end + + context 'when given path targets a directory recursively' do + let(:target_path) { 'other_artifacts_0.1.2/' } + let(:recursive) { true } + + it 'returns all paths recursively within the target directory' do + expect(subject.keys).to contain_exactly( + 'other_artifacts_0.1.2/', + 'other_artifacts_0.1.2/doc_sample.txt', + 'other_artifacts_0.1.2/another-subdirectory/', + 'other_artifacts_0.1.2/another-subdirectory/empty_directory/', + 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' + ) + end + end + end - describe '#errors' do - subject { metadata('').errors } + context 'and given path does not start with a ./ prefix' do + it_behaves_like 'finding entries for a given path', path_prefix: '' + end - it { is_expected.to eq({}) } + context 'and given path starts with a ./ prefix' do + it_behaves_like 'finding entries for a given path', path_prefix: './' + end end - end - context 'metadata file does not exist' do - let(:metadata_file_path) { nil } + context 'when metadata file stream is nil' do + let(:path) { '' } + let(:metadata_file_stream) { nil } - describe '#find_entries!' do it 'raises error' do - expect { metadata.find_entries! }.to raise_error(described_class::InvalidStreamError, /Invalid stream/) + expect { find_entries }.to raise_error(described_class::InvalidStreamError, /Invalid stream/) end end - end - context 'metadata file is invalid' do - let(:metadata_file_path) { Rails.root + 'spec/fixtures/ci_build_artifacts.zip' } + context 'when metadata file is invalid' do + let(:path) { '' } + let(:metadata_file_path) { Rails.root + 'spec/fixtures/ci_build_artifacts.zip' } - describe '#find_entries!' do it 'raises error' do - expect { metadata.find_entries! 
}.to raise_error(described_class::InvalidStreamError, /not in gzip format/) + expect { find_entries }.to raise_error(described_class::InvalidStreamError, /not in gzip format/) end end - end - context 'generated metadata' do - let(:tmpfile) { Tempfile.new('test-metadata') } - let(:generator) { CiArtifactMetadataGenerator.new(tmpfile) } - let(:entry_count) { 5 } + context 'with generated metadata' do + let(:tmpfile) { Tempfile.new('test-metadata') } + let(:generator) { CiArtifactMetadataGenerator.new(tmpfile) } + let(:entry_count) { 5 } - before do - tmpfile.binmode + before do + tmpfile.binmode - (1..entry_count).each do |index| - generator.add_entry("public/test-#{index}.txt") - end + (1..entry_count).each do |index| + generator.add_entry("public/test-#{index}.txt") + end - generator.write - end + generator.write + end - after do - File.unlink(tmpfile.path) - end + after do + File.unlink(tmpfile.path) + end - describe '#find_entries!' do - it 'reads expected number of entries' do - stream = File.open(tmpfile.path) + describe '#find_entries!' do + it 'reads expected number of entries' do + stream = File.open(tmpfile.path) - metadata = described_class.new(stream, 'public', recursive: true) + metadata = described_class.new(stream, 'public', recursive: true) - expect(metadata.find_entries!.count).to eq entry_count + expect(metadata.find_entries!.count).to eq entry_count + end end end end diff --git a/spec/lib/gitlab/ci/build/duration_parser_spec.rb b/spec/lib/gitlab/ci/build/duration_parser_spec.rb index 7f5ff1eb0ee..bc905aa0a35 100644 --- a/spec/lib/gitlab/ci/build/duration_parser_spec.rb +++ b/spec/lib/gitlab/ci/build/duration_parser_spec.rb @@ -25,8 +25,8 @@ RSpec.describe Gitlab::Ci::Build::DurationParser do it { is_expected.to be_truthy } it 'caches data' do - expect(ChronicDuration).to receive(:parse).with(value).once.and_call_original - expect(ChronicDuration).to receive(:parse).with(other_value).once.and_call_original + expect(ChronicDuration).to receive(:parse).with(value, use_complete_matcher: true).once.and_call_original + expect(ChronicDuration).to receive(:parse).with(other_value, use_complete_matcher: true).once.and_call_original 2.times do expect(described_class.validate_duration(value)).to eq(86400) @@ -41,7 +41,7 @@ RSpec.describe Gitlab::Ci::Build::DurationParser do it { is_expected.to be_falsy } it 'caches data' do - expect(ChronicDuration).to receive(:parse).with(value).once.and_call_original + expect(ChronicDuration).to receive(:parse).with(value, use_complete_matcher: true).once.and_call_original 2.times do expect(described_class.validate_duration(value)).to be_falsey diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb index f4bc706f9b4..97843781891 100644 --- a/spec/lib/gitlab/ci/components/instance_path_spec.rb +++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb @@ -14,125 +14,214 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline end describe 'FQDN path' do - let_it_be(:existing_project) { create(:project, :repository) } - - let(:project_path) { existing_project.full_path } - let(:address) { "acme.com/#{project_path}/component@#{version}" } let(:version) { 'master' } + let(:project_path) { project.full_path } + let(:address) { "acme.com/#{project_path}/secret-detection@#{version}" } + + context 'when the project repository contains a templates directory' do + let_it_be(:project) do + create( + :project, :custom_repo, + files: { + 'templates/secret-detection.yml' => 
'image: alpine_1', + 'templates/dast/template.yml' => 'image: alpine_2', + 'templates/dast/another-template.yml' => 'image: alpine_3', + 'templates/dast/another-folder/template.yml' => 'image: alpine_4' + } + ) + end - context 'when project exists' do - it 'provides the expected attributes', :aggregate_failures do - expect(path.project).to eq(existing_project) - expect(path.host).to eq(current_host) - expect(path.sha).to eq(existing_project.commit('master').id) - expect(path.project_file_path).to eq('component/template.yml') + before do + project.add_developer(user) end - context 'when content exists' do - let(:content) { 'image: alpine' } + context 'when user does not have permissions' do + it 'raises an error when fetching the content' do + expect { path.fetch_content!(current_user: build(:user)) } + .to raise_error(Gitlab::Access::AccessDeniedError) + end + end - before do - allow_next_instance_of(Repository) do |instance| - allow(instance) - .to receive(:blob_data_at) - .with(existing_project.commit('master').id, 'component/template.yml') - .and_return(content) - end + context 'when the component is simple (single file template)' do + it 'fetches the component content', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to eq('image: alpine_1') + expect(path.host).to eq(current_host) + expect(path.project_file_path).to eq('templates/secret-detection.yml') + expect(path.project).to eq(project) + expect(path.sha).to eq(project.commit('master').id) end + end - context 'when user has permissions to read code' do - before do - existing_project.add_developer(user) - end + context 'when the component is complex (directory-based template)' do + let(:address) { "acme.com/#{project_path}/dast@#{version}" } + + it 'fetches the component content', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to eq('image: alpine_2') + expect(path.host).to eq(current_host) + expect(path.project_file_path).to eq('templates/dast/template.yml') + expect(path.project).to eq(project) + expect(path.sha).to eq(project.commit('master').id) + end - it 'fetches the content' do - expect(path.fetch_content!(current_user: user)).to eq(content) + context 'when there is an invalid nested component folder' do + let(:address) { "acme.com/#{project_path}/dast/another-folder@#{version}" } + + it 'returns nil' do + expect(path.fetch_content!(current_user: user)).to be_nil end end - context 'when user does not have permissions to download code' do - it 'raises an error when fetching the content' do - expect { path.fetch_content!(current_user: user) } - .to raise_error(Gitlab::Access::AccessDeniedError) + context 'when there is an invalid nested component path' do + let(:address) { "acme.com/#{project_path}/dast/another-template@#{version}" } + + it 'returns nil' do + expect(path.fetch_content!(current_user: user)).to be_nil end end end - end - context 'when project path is nested under a subgroup' do - let(:existing_group) { create(:group, :nested) } - let(:existing_project) { create(:project, :repository, group: existing_group) } + context 'when fetching the latest version of a component' do + let_it_be(:project) do + create( + :project, :custom_repo, + files: { + 'templates/secret-detection.yml' => 'image: alpine_1' + } + ) + end - it 'provides the expected attributes', :aggregate_failures do - expect(path.project).to eq(existing_project) - expect(path.host).to eq(current_host) - expect(path.sha).to eq(existing_project.commit('master').id) - expect(path.project_file_path).to 
eq('component/template.yml') - end - end + let(:version) { '~latest' } - context 'when current GitLab instance is installed on a relative URL' do - let(:address) { "acme.com/gitlab/#{project_path}/component@#{version}" } - let(:current_host) { 'acme.com/gitlab/' } + let(:latest_sha) do + project.repository.commit('master').id + end - it 'provides the expected attributes', :aggregate_failures do - expect(path.project).to eq(existing_project) - expect(path.host).to eq(current_host) - expect(path.sha).to eq(existing_project.commit('master').id) - expect(path.project_file_path).to eq('component/template.yml') + before do + create(:release, project: project, sha: project.repository.root_ref_sha, + released_at: Time.zone.now - 1.day) + + project.repository.update_file( + user, 'templates/secret-detection.yml', 'image: alpine_2', + message: 'Updates image', branch_name: project.default_branch + ) + + create(:release, project: project, sha: latest_sha, + released_at: Time.zone.now) + end + + it 'fetches the component content', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to eq('image: alpine_2') + expect(path.host).to eq(current_host) + expect(path.project_file_path).to eq('templates/secret-detection.yml') + expect(path.project).to eq(project) + expect(path.sha).to eq(latest_sha) + end end - end - context 'when version does not exist' do - let(:version) { 'non-existent' } + context 'when version does not exist' do + let(:version) { 'non-existent' } - it 'provides the expected attributes', :aggregate_failures do - expect(path.project).to eq(existing_project) - expect(path.host).to eq(current_host) - expect(path.sha).to be_nil - expect(path.project_file_path).to eq('component/template.yml') + it 'returns nil', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to be_nil + expect(path.host).to eq(current_host) + expect(path.project_file_path).to be_nil + expect(path.project).to eq(project) + expect(path.sha).to be_nil + end end - it 'returns nil when fetching the content' do - expect(path.fetch_content!(current_user: user)).to be_nil + context 'when current GitLab instance is installed on a relative URL' do + let(:address) { "acme.com/gitlab/#{project_path}/secret-detection@#{version}" } + let(:current_host) { 'acme.com/gitlab/' } + + it 'fetches the component content', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to eq('image: alpine_1') + expect(path.host).to eq(current_host) + expect(path.project_file_path).to eq('templates/secret-detection.yml') + expect(path.project).to eq(project) + expect(path.sha).to eq(project.commit('master').id) + end end end - context 'when version is `~latest`' do - let(:version) { '~latest' } + # All the following tests are for deprecated code and will be removed + # in https://gitlab.com/gitlab-org/gitlab/-/issues/415855 + context 'when the project does not contain a templates directory' do + let(:project_path) { project.full_path } + let(:address) { "acme.com/#{project_path}/component@#{version}" } + + let_it_be(:project) do + create( + :project, :custom_repo, + files: { + 'component/template.yml' => 'image: alpine' + } + ) + end + + before do + project.add_developer(user) + end - context 'when project has releases' do - let_it_be(:latest_release) do - create(:release, project: existing_project, sha: 'sha-1', released_at: Time.zone.now) - end + it 'fetches the component content', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to eq('image: alpine') + 
expect(path.host).to eq(current_host) + expect(path.project_file_path).to eq('component/template.yml') + expect(path.project).to eq(project) + expect(path.sha).to eq(project.commit('master').id) + end - before_all do - # Previous release - create(:release, project: existing_project, sha: 'sha-2', released_at: Time.zone.now - 1.day) + context 'when project path is nested under a subgroup' do + let_it_be(:group) { create(:group, :nested) } + let_it_be(:project) do + create( + :project, :custom_repo, + files: { + 'component/template.yml' => 'image: alpine' + }, + group: group + ) end - it 'returns the sha of the latest release' do - expect(path.sha).to eq(latest_release.sha) + it 'fetches the component content', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to eq('image: alpine') + expect(path.host).to eq(current_host) + expect(path.project_file_path).to eq('component/template.yml') + expect(path.project).to eq(project) + expect(path.sha).to eq(project.commit('master').id) end end - context 'when project does not have releases' do - it { expect(path.sha).to be_nil } + context 'when current GitLab instance is installed on a relative URL' do + let(:address) { "acme.com/gitlab/#{project_path}/component@#{version}" } + let(:current_host) { 'acme.com/gitlab/' } + + it 'fetches the component content', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to eq('image: alpine') + expect(path.host).to eq(current_host) + expect(path.project_file_path).to eq('component/template.yml') + expect(path.project).to eq(project) + expect(path.sha).to eq(project.commit('master').id) + end end - end - context 'when project does not exist' do - let(:project_path) { 'non-existent/project' } + context 'when version does not exist' do + let(:version) { 'non-existent' } - it 'provides the expected attributes', :aggregate_failures do - expect(path.project).to be_nil - expect(path.host).to eq(current_host) - expect(path.sha).to be_nil - expect(path.project_file_path).to be_nil + it 'returns nil', :aggregate_failures do + expect(path.fetch_content!(current_user: user)).to be_nil + expect(path.host).to eq(current_host) + expect(path.project_file_path).to be_nil + expect(path.project).to eq(project) + expect(path.sha).to be_nil + end end - it 'returns nil when fetching the content' do - expect(path.fetch_content!(current_user: user)).to be_nil + context 'when user does not have permissions' do + it 'raises an error when fetching the content' do + expect { path.fetch_content!(current_user: build(:user)) } + .to raise_error(Gitlab::Access::AccessDeniedError) + end end end end diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb index 736c184a289..567ffa68836 100644 --- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb @@ -14,7 +14,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do # as they do not have sense in context of Bridge let(:ignored_inheritable_columns) do %i[before_script after_script hooks image services cache interruptible timeout - retry tags artifacts] + retry tags artifacts id_tokens] end end diff --git a/spec/lib/gitlab/ci/config/entry/default_spec.rb b/spec/lib/gitlab/ci/config/entry/default_spec.rb index 46e96843ee3..17e716629cd 100644 --- a/spec/lib/gitlab/ci/config/entry/default_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/default_spec.rb @@ -27,7 +27,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Default do it 'contains the expected node names' 
do expect(described_class.nodes.keys) .to match_array(%i[before_script after_script hooks cache image services - interruptible timeout retry tags artifacts]) + interruptible timeout retry tags artifacts id_tokens]) end end end diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb index dd15b049b9b..cd8e35ede61 100644 --- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require 'spec_helper' # Change this to fast spec helper when FF `ci_refactor_external_rules` is removed +require 'fast_spec_helper' require_dependency 'active_model' RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category: :pipeline_composition do @@ -14,21 +14,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category entry.compose! end - shared_examples 'a valid config' do + shared_examples 'a valid config' do |expected_value = nil| it { is_expected.to be_valid } it 'returns the expected value' do - expect(entry.value).to eq(config.compact) - end - - context 'when FF `ci_refactor_external_rules` is disabled' do - before do - stub_feature_flags(ci_refactor_external_rules: false) - end - - it 'returns the expected value' do - expect(entry.value).to eq(config) - end + expect(entry.value).to eq(expected_value || config.compact) end end @@ -99,19 +89,37 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category it_behaves_like 'a valid config' - context 'when array' do + context 'when exists: clause is an array' do let(:config) { { exists: ['./this.md', './that.md'] } } it_behaves_like 'a valid config' end - context 'when null' do + context 'when exists: clause is null' do let(:config) { { exists: nil } } it_behaves_like 'a valid config' end end + context 'when specifying a changes: clause' do + let(:config) { { changes: %w[Dockerfile lib/* paths/**/*.rb] } } + + it_behaves_like 'a valid config', { changes: { paths: %w[Dockerfile lib/* paths/**/*.rb] } } + + context 'with paths:' do + let(:config) { { changes: { paths: %w[Dockerfile lib/* paths/**/*.rb] } } } + + it_behaves_like 'a valid config' + end + + context 'with paths: and compare_to:' do + let(:config) { { changes: { paths: ['Dockerfile'], compare_to: 'branch1' } } } + + it_behaves_like 'a valid config' + end + end + context 'when specifying an unknown keyword' do let(:config) { { invalid: :something } } diff --git a/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb index 05db81abfc1..503020e2202 100644 --- a/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/include/rules_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require 'spec_helper' # Change this to fast spec helper when FF `ci_refactor_external_rules` is removed +require 'fast_spec_helper' require_dependency 'active_model' RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pipeline_composition do @@ -50,7 +50,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip entry.compose! 
end - it_behaves_like 'an invalid config', /contains unknown keys: changes/ + it_behaves_like 'a valid config' end end @@ -80,7 +80,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip let(:config) do [ { if: '$THIS == "that"' }, - { if: '$SKIP', when: 'never' } + { if: '$SKIP', when: 'never' }, + { changes: ['Dockerfile'] } ] end @@ -96,7 +97,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip is_expected.to eq( [ { if: '$THIS == "that"' }, - { if: '$SKIP', when: 'never' } + { if: '$SKIP', when: 'never' }, + { changes: { paths: ['Dockerfile'] } } ] ) end @@ -115,30 +117,5 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules, feature_category: :pip end end end - - context 'when FF `ci_refactor_external_rules` is disabled' do - before do - stub_feature_flags(ci_refactor_external_rules: false) - end - - context 'with an "if"' do - let(:config) do - [{ if: '$THIS == "that"' }] - end - - it { is_expected.to eq(config) } - end - - context 'with a list of two rules' do - let(:config) do - [ - { if: '$THIS == "that"' }, - { if: '$SKIP' } - ] - end - - it { is_expected.to eq(config) } - end - end end end diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb index 4be7c11fab0..1a78d929871 100644 --- a/spec/lib/gitlab/ci/config/entry/job_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_composition do + using RSpec::Parameterized::TableSyntax + let(:entry) { described_class.new(config, name: :rspec) } it_behaves_like 'with inheritable CI config' do @@ -29,7 +31,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo let(:result) do %i[before_script script after_script hooks stage cache image services only except rules needs variables artifacts - environment coverage retry interruptible timeout release tags + coverage retry interruptible timeout release tags inherit parallel] end @@ -696,8 +698,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo end context 'with workflow rules' do - using RSpec::Parameterized::TableSyntax - where(:name, :has_workflow_rules?, :only, :rules, :result) do "uses default only" | false | nil | nil | { refs: %w[branches tags] } "uses user only" | false | %w[branches] | nil | { refs: %w[branches] } @@ -739,6 +739,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo end end + describe '#pages_job?', :aggregate_failures, feature_category: :pages do + where(:name, :result) do + :pages | true + :'pages:staging' | false + :'something:pages:else' | false + end + + with_them do + subject { described_class.new({}, name: name).pages_job? } + + it { is_expected.to eq(result) } + end + end + context 'when composed' do before do entry.compose! 
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb index 4f13940d7e2..132e75a808b 100644 --- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb @@ -371,6 +371,39 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable, feature_category: :pipeli end end + context 'with environment' do + context 'when environment name is specified' do + let(:config) { { script: 'ls', environment: 'prod' }.compact } + + it 'sets environment name and action to the entry value' do + entry.compose!(deps) + + expect(entry.value[:environment]).to eq({ action: 'start', name: 'prod' }) + expect(entry.value[:environment_name]).to eq('prod') + end + end + + context 'when environment name, url and action are specified' do + let(:config) do + { + script: 'ls', + environment: { + name: 'staging', + url: 'https://gitlab.com', + action: 'prepare' + } + }.compact + end + + it 'sets environment name, action and url to the entry value' do + entry.compose!(deps) + + expect(entry.value[:environment]).to eq({ action: 'prepare', name: 'staging', url: 'https://gitlab.com' }) + expect(entry.value[:environment_name]).to eq('staging') + end + end + end + context 'with inheritance' do context 'of default:tags' do using RSpec::Parameterized::TableSyntax diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb index d8bd578be94..9ac72ebbac8 100644 --- a/spec/lib/gitlab/ci/config/external/context_spec.rb +++ b/spec/lib/gitlab/ci/config/external/context_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipeline_composition do let(:project) { build(:project) } + let(:pipeline) { double('Pipeline') } let(:user) { double('User') } let(:sha) { '12345' } let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'a', 'value' => 'b' }]) } @@ -11,6 +12,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin let(:attributes) do { project: project, + pipeline: pipeline, user: user, sha: sha, variables: variables, @@ -32,7 +34,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin end context 'without values' do - let(:attributes) { { project: nil, user: nil, sha: nil } } + let(:attributes) { { project: nil, pipeline: nil, user: nil, sha: nil } } it { is_expected.to have_attributes(**attributes) } it { expect(subject.expandset).to eq([]) } @@ -148,6 +150,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin let(:attributes) do { project: project, + pipeline: pipeline, user: user, sha: sha, logger: double('logger') @@ -165,6 +168,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin it { expect(mutated).not_to eq(subject) } it { expect(mutated).to be_a(described_class) } it { expect(mutated).to have_attributes(new_attributes) } + it { expect(mutated.pipeline).to eq(subject.pipeline) } it { expect(mutated.expandset).to eq(subject.expandset) } it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) } it { expect(mutated.logger).to eq(mutated.logger) } diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb index 487690296b5..0f7b811b5df 100644 --- a/spec/lib/gitlab/ci/config/external/file/component_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb @@ 
-120,6 +120,41 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category: end end + describe '#content' do + context 'when component is valid' do + let(:content) do + <<~COMPONENT + job: + script: echo + COMPONENT + end + + let(:response) do + ServiceResponse.success(payload: { + content: content, + path: instance_double(::Gitlab::Ci::Components::InstancePath, project: project, sha: '12345') + }) + end + + it 'tracks the event' do + expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with('cicd_component_usage', + values: external_resource.context.user.id) + + external_resource.content + end + end + + context 'when component is invalid' do + let(:content) { 'the-content' } + + it 'does not track the event' do + expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event) + + external_resource.content + end + end + end + describe '#metadata' do subject(:metadata) { external_resource.metadata } diff --git a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb index 69b0524be9e..f542c0485e0 100644 --- a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb +++ b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb @@ -409,32 +409,6 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category: expect { process }.to raise_error(expected_error_class) end end - - context 'when introduce_ci_max_total_yaml_size_bytes is disabled' do - before do - stub_feature_flags(introduce_ci_max_total_yaml_size_bytes: false) - end - - context 'when pipeline tree size is within the limit' do - before do - stub_application_setting(ci_max_total_yaml_size_bytes: 10000) - end - - it 'passes the verification' do - expect(process.all?(&:valid?)).to be_truthy - end - end - - context 'when pipeline tree size is larger then the limit' do - before do - stub_application_setting(ci_max_total_yaml_size_bytes: 100) - end - - it 'passes the verification' do - expect(process.all?(&:valid?)).to be_truthy - end - end - end end end end diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb index 19113ce6a4e..68cdf56f198 100644 --- a/spec/lib/gitlab/ci/config/external/processor_spec.rb +++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb @@ -557,21 +557,11 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel context 'when rules defined' do context 'when a rule is invalid' do let(:values) do - { include: [{ local: 'builds.yml', rules: [{ changes: ['$MY_VAR'] }] }] } + { include: [{ local: 'builds.yml', rules: [{ allow_failure: ['$MY_VAR'] }] }] } end it 'raises IncludeError' do - expect { subject }.to raise_error(described_class::IncludeError, /contains unknown keys: changes/) - end - - context 'when FF `ci_refactor_external_rules` is disabled' do - before do - stub_feature_flags(ci_refactor_external_rules: false) - end - - it 'raises IncludeError' do - expect { subject }.to raise_error(described_class::IncludeError, /invalid include rule/) - end + expect { subject }.to raise_error(described_class::IncludeError, /contains unknown keys: allow_failure/) end end end diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb index 8674af7ab65..15d7801ff2a 100644 --- a/spec/lib/gitlab/ci/config/external/rules_spec.rb +++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb @@ -4,76 +4,45 @@ require 'spec_helper' 
RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_composition do let(:context) { double(variables_hash: {}) } - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] } + let(:rule_hashes) {} + let(:pipeline) { instance_double(Ci::Pipeline) } + let_it_be(:project) { create(:project, :custom_repo, files: { 'file.txt' => 'file' }) } subject(:rules) { described_class.new(rule_hashes) } + before do + allow(context).to receive(:project).and_return(project) + allow(context).to receive(:pipeline).and_return(pipeline) + end + describe '#evaluate' do subject(:result) { rules.evaluate(context).pass? } context 'when there is no rule' do - let(:rule_hashes) {} - it { is_expected.to eq(true) } end - shared_examples 'when there is a rule with if' do |rule_matched_result = true, rule_not_matched_result = false| - context 'when the rule matches' do - let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) } - - it { is_expected.to eq(rule_matched_result) } - end - - context 'when the rule does not match' do - let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) } - - it { is_expected.to eq(rule_not_matched_result) } - end - end - - shared_examples 'when there is a rule with exists' do |file_exists_result = true, file_not_exists_result = false| - let(:project) { create(:project, :repository) } - - context 'when the file exists' do - let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['Dockerfile']) } - + shared_examples 'with when: specified' do + context 'with when: never' do before do - project.repository.create_file(project.first_owner, 'Dockerfile', "commit", message: 'test', branch_name: "master") + rule_hashes.first[:when] = 'never' end - it { is_expected.to eq(file_exists_result) } - end - - context 'when the file does not exist' do - let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['test.md']) } - - it { is_expected.to eq(file_not_exists_result) } - end - end - - it_behaves_like 'when there is a rule with if' - - context 'when there is a rule with exists' do - let(:rule_hashes) { [{ exists: 'Dockerfile' }] } - - it_behaves_like 'when there is a rule with exists' - end - - context 'when there is a rule with if and when' do - context 'with when: never' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'never' }] } - - it_behaves_like 'when there is a rule with if', false, false + it { is_expected.to eq(false) } end context 'with when: always' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'always' }] } + before do + rule_hashes.first[:when] = 'always' + end - it_behaves_like 'when there is a rule with if' + it { is_expected.to eq(true) } end context 'with when: ' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] } + before do + rule_hashes.first[:when] = 'on_success' + end it 'raises an error' do expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /when unknown value: on_success/) @@ -81,132 +50,125 @@ RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_ end context 'with when: null' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: nil }] } + before do + rule_hashes.first[:when] = nil + end - it_behaves_like 'when there is a rule with if' + it { is_expected.to eq(true) } end end - context 'when there is a rule with exists and when' do - context 'with when: never' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'never' }] } + context 
'when there is a rule with if:' do + let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] } - it_behaves_like 'when there is a rule with exists', false, false - end + context 'when the rule matches' do + let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) } - context 'with when: always' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'always' }] } + it { is_expected.to eq(true) } - it_behaves_like 'when there is a rule with exists' + it_behaves_like 'with when: specified' end - context 'with when: ' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] } + context 'when the rule does not match' do + let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) } - it 'raises an error' do - expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /when unknown value: on_success/) - end + it { is_expected.to eq(false) } end + end - context 'with when: null' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: nil }] } + context 'when there is a rule with exists:' do + let(:rule_hashes) { [{ exists: 'file.txt' }] } - it_behaves_like 'when there is a rule with exists' + context 'when the file exists' do + let(:context) { double(top_level_worktree_paths: ['file.txt']) } + + it { is_expected.to eq(true) } + + it_behaves_like 'with when: specified' end - end - context 'when there is a rule with changes' do - let(:rule_hashes) { [{ changes: ['$MY_VAR'] }] } + context 'when the file does not exist' do + let(:context) { double(top_level_worktree_paths: ['README.md']) } - it 'raises an error' do - expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /contains unknown keys: changes/) + it { is_expected.to eq(false) } end end - context 'when FF `ci_refactor_external_rules` is disabled' do - before do - stub_feature_flags(ci_refactor_external_rules: false) - end + context 'when there is a rule with changes:' do + let(:rule_hashes) { [{ changes: ['file.txt'] }] } - context 'when there is no rule' do - let(:rule_hashes) {} + shared_examples 'when the pipeline has modified paths' do + let(:modified_paths) { ['file.txt'] } - it { is_expected.to eq(true) } - end + before do + allow(pipeline).to receive(:modified_paths).and_return(modified_paths) + end - it_behaves_like 'when there is a rule with if' + context 'when the file has changed' do + it { is_expected.to eq(true) } - context 'when there is a rule with exists' do - let(:rule_hashes) { [{ exists: 'Dockerfile' }] } + it_behaves_like 'with when: specified' + end - it_behaves_like 'when there is a rule with exists' + context 'when the file has not changed' do + let(:modified_paths) { ['README.md'] } + + it { is_expected.to eq(false) } + end end - context 'when there is a rule with if and when' do - context 'with when: never' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'never' }] } + it_behaves_like 'when the pipeline has modified paths' - it_behaves_like 'when there is a rule with if', false, false - end + context 'with paths: specified' do + let(:rule_hashes) { [{ changes: { paths: ['file.txt'] } }] } - context 'with when: always' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'always' }] } + it_behaves_like 'when the pipeline has modified paths' + end - it_behaves_like 'when there is a rule with if' - end + context 'with paths: and compare_to: specified' do + before_all do + project.repository.add_branch(project.owner, 'branch1', 'master') - context 'with when: ' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] } + 
project.repository.update_file( + project.owner, 'file.txt', 'file updated', message: 'Update file.txt', branch_name: 'branch1' + ) - it 'raises an error' do - expect { result }.to raise_error(described_class::InvalidIncludeRulesError, - 'invalid include rule: {:if=>"$MY_VAR == \"hello\"", :when=>"on_success"}') - end + project.repository.add_branch(project.owner, 'branch2', 'branch1') end - context 'with when: null' do - let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: nil }] } - - it_behaves_like 'when there is a rule with if' + let_it_be(:pipeline) do + build(:ci_pipeline, project: project, ref: 'branch2', sha: project.commit('branch2').sha) end - end - context 'when there is a rule with exists and when' do - context 'with when: never' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'never' }] } + context 'when the file has changed compared to the given ref' do + let(:rule_hashes) { [{ changes: { paths: ['file.txt'], compare_to: 'master' } }] } + + it { is_expected.to eq(true) } - it_behaves_like 'when there is a rule with exists', false, false + it_behaves_like 'with when: specified' end - context 'with when: always' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'always' }] } + context 'when the file has not changed compared to the given ref' do + let(:rule_hashes) { [{ changes: { paths: ['file.txt'], compare_to: 'branch1' } }] } - it_behaves_like 'when there is a rule with exists' + it { is_expected.to eq(false) } end - context 'with when: ' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: 'on_success' }] } + context 'when compare_to: is invalid' do + let(:rule_hashes) { [{ changes: { paths: ['file.txt'], compare_to: 'invalid' } }] } it 'raises an error' do - expect { result }.to raise_error(described_class::InvalidIncludeRulesError, - 'invalid include rule: {:exists=>"Dockerfile", :when=>"on_success"}') + expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /compare_to is not a valid ref/) end end - - context 'with when: null' do - let(:rule_hashes) { [{ exists: 'Dockerfile', when: nil }] } - - it_behaves_like 'when there is a rule with exists' - end end + end - context 'when there is a rule with changes' do - let(:rule_hashes) { [{ changes: ['$MY_VAR'] }] } + context 'when there is a rule with an invalid key' do + let(:rule_hashes) { [{ invalid: ['$MY_VAR'] }] } - it 'raises an error' do - expect { result }.to raise_error(described_class::InvalidIncludeRulesError, - 'invalid include rule: {:changes=>["$MY_VAR"]}') - end + it 'raises an error' do + expect { result }.to raise_error(described_class::InvalidIncludeRulesError, /contains unknown keys: invalid/) end end end diff --git a/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb index 7bb09d35064..804164c933a 100644 --- a/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb +++ b/spec/lib/gitlab/ci/config/interpolation/interpolator_spec.rb @@ -57,7 +57,8 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::Interpolator, feature_category expect(subject).not_to be_valid expect(subject.error_message).to eq subject.errors.first - expect(subject.errors).to include('unknown input arguments') + expect(subject.errors).to include('Given inputs not defined in the `spec` section of the included ' \ + 'configuration file') end end diff --git a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb index bf89942bf14..0af1b721eb6 100644 --- 
a/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb +++ b/spec/lib/gitlab/ci/config/yaml/tags/reference_spec.rb @@ -2,9 +2,9 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Reference do +RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Reference, feature_category: :pipeline_composition do let(:config) do - Gitlab::Ci::Config::Yaml.load!(yaml) + Gitlab::Ci::Config::Yaml::Loader.new(yaml).load.content end describe '.tag' do diff --git a/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb b/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb index 594242c33cc..74d7513ebdf 100644 --- a/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb +++ b/spec/lib/gitlab/ci/config/yaml/tags/resolver_spec.rb @@ -2,9 +2,9 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Resolver do +RSpec.describe Gitlab::Ci::Config::Yaml::Tags::Resolver, feature_category: :pipeline_composition do let(:config) do - Gitlab::Ci::Config::Yaml.load!(yaml) + Gitlab::Ci::Config::Yaml::Loader.new(yaml).load.content end describe '#to_hash' do diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb index d06537ac330..a331af9a9ac 100644 --- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb +++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb @@ -3,18 +3,20 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependency_management do - let(:report) { instance_double('Gitlab::Ci::Reports::Sbom::Report') } + let(:report) { Gitlab::Ci::Reports::Sbom::Report.new } let(:report_data) { base_report_data } let(:raw_report_data) { report_data.to_json } let(:report_valid?) { true } let(:validator_errors) { [] } let(:properties_parser) { class_double('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties') } + let(:uuid) { 'c9d550a3-feb8-483b-a901-5aa892d039f9' } let(:base_report_data) do { 'bomFormat' => 'CycloneDX', 'specVersion' => '1.4', - 'version' => 1 + 'version' => 1, + 'serialNumber' => "urn:uuid:#{uuid}" } end @@ -28,6 +30,7 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen allow(properties_parser).to receive(:parse_source) stub_const('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties', properties_parser) + allow(SecureRandom).to receive(:uuid).and_return(uuid) end context 'when report JSON is invalid' do @@ -149,8 +152,22 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen end end - context 'when report has metadata properties' do - let(:report_data) { base_report_data.merge({ 'metadata' => { 'properties' => properties } }) } + context 'when report has metadata tools, author and properties' do + let(:report_data) { base_report_data.merge(metadata) } + + let(:tools) do + [ + { name: 'Gemnasium', vendor: 'vendor-1', version: '2.34.0' }, + { name: 'Gemnasium', vendor: 'vendor-2', version: '2.34.0' } + ] + end + + let(:authors) do + [ + { name: 'author-1', email: 'support@gitlab.com' }, + { name: 'author-2', email: 'support@gitlab.com' } + ] + end let(:properties) do [ @@ -163,10 +180,44 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen ] end - it 'passes them to the properties parser' do - expect(properties_parser).to receive(:parse_source).with(properties) + context 'when metadata attributes are present' do + let(:metadata) do + { + 'metadata' => { + 'tools' => tools, + 'authors' => authors, + 'properties' => properties + } + } + end - parse! 
+ it 'passes them to the report' do + expect(properties_parser).to receive(:parse_source).with(properties) + + parse! + + expect(report.metadata).to have_attributes( + tools: tools.map(&:with_indifferent_access), + authors: authors.map(&:with_indifferent_access), + properties: properties.map(&:with_indifferent_access) + ) + end + end + + context 'when metadata attributes are not present' do + let(:metadata) { { 'metadata' => {} } } + + it 'passes them to the report' do + expect(properties_parser).to receive(:parse_source).with(nil) + + parse! + + expect(report.metadata).to have_attributes( + tools: [], + authors: [], + properties: [] + ) + end end end end diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb index dc16ddf4e0e..9470d59f502 100644 --- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb +++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb @@ -229,8 +229,9 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera describe 'parsing finding.details' do context 'when details are provided' do + let(:finding) { report.findings[4] } + it 'sets details from the report' do - finding = report.findings.find { |x| x.compare_key == 'CVE-1020' } expected_details = Gitlab::Json.parse(finding.raw_metadata)['details'] expect(finding.details).to eq(expected_details) @@ -238,8 +239,9 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera end context 'when details are not provided' do + let(:finding) { report.findings[5] } + it 'sets empty hash' do - finding = report.findings.find { |x| x.compare_key == 'CVE-1030' } expect(finding.details).to eq({}) end end diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb index 5f87e0ccc33..54e569f424b 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -1081,6 +1081,126 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co end end + context 'with a rule using CI_ENVIRONMENT_ACTION variable' do + let(:rule_set) do + [{ if: '$CI_ENVIRONMENT_ACTION == "start"' }] + end + + context 'when environment:action satisfies the rule' do + let(:attributes) do + { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success', + options: { environment: { action: 'start' } } } + end + + it { is_expected.to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'on_success') + end + end + + context 'when environment:action does not satisfy rule' do + let(:attributes) do + { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success', + options: { environment: { action: 'stop' } } } + end + + it { is_expected.not_to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end + end + + context 'when environment:action is not set' do + it { is_expected.not_to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end + end + end + + context 'with a rule using CI_ENVIRONMENT_TIER variable' do + let(:rule_set) do + [{ if: '$CI_ENVIRONMENT_TIER == "production"' }] + end + + context 'when environment:deployment_tier satisfies the rule' do + let(:attributes) do + { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success', + options: { environment: { deployment_tier: 'production' } } } + end + + it { is_expected.to 
be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'on_success') + end + end + + context 'when environment:deployment_tier does not satisfy rule' do + let(:attributes) do + { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success', + options: { environment: { deployment_tier: 'development' } } } + end + + it { is_expected.not_to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end + end + + context 'when environment:action is not set' do + it { is_expected.not_to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end + end + end + + context 'with a rule using CI_ENVIRONMENT_URL variable' do + let(:rule_set) do + [{ if: '$CI_ENVIRONMENT_URL == "http://gitlab.com"' }] + end + + context 'when environment:url satisfies the rule' do + let(:attributes) do + { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success', + options: { environment: { url: 'http://gitlab.com' } } } + end + + it { is_expected.to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'on_success') + end + end + + context 'when environment:url does not satisfy rule' do + let(:attributes) do + { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success', + options: { environment: { url: 'http://staging.gitlab.com' } } } + end + + it { is_expected.not_to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end + end + + context 'when environment:action is not set' do + it { is_expected.not_to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end + end + end + context 'with no rules' do let(:rule_set) { [] } diff --git a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb index d62d25aeefe..4c9fd00f96a 100644 --- a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb +++ b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb @@ -49,6 +49,18 @@ RSpec.describe Gitlab::Ci::Reports::Sbom::Component, feature_category: :dependen end end + describe '#purl_type' do + subject { component.purl_type } + + it { is_expected.to eq(purl_type) } + end + + describe '#type' do + subject { component.type } + + it { is_expected.to eq(component_type) } + end + describe '#<=>' do where do { diff --git a/spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb b/spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb new file mode 100644 index 00000000000..fe0b9481039 --- /dev/null +++ b/spec/lib/gitlab/ci/reports/sbom/metadata_spec.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Reports::Sbom::Metadata, feature_category: :dependency_management do + let(:tools) do + [ + { + vendor: "vendor", + name: "Gemnasium", + version: "2.34.0" + } + ] + end + + let(:authors) do + [ + { + name: "author_name", + email: "support@gitlab.com" + } + ] + end + + let(:properties) do + [ + { + name: "property_name", + value: "package-lock.json" + } + ] + end + + let(:timestamp) { "2020-04-13T20:20:39+00:00" } + + subject(:metadata) do + metadata = described_class.new( + tools: tools, + authors: authors, + properties: properties + ) + metadata.timestamp = timestamp + metadata + end + + it 'has correct attributes' do + expect(metadata).to have_attributes( + tools: tools, + authors: authors, + properties: 
properties, + timestamp: timestamp + ) + end +end diff --git a/spec/lib/gitlab/ci/templates/MATLAB_spec.rb b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb index 3889d1fc8c9..8b6ff7f27a2 100644 --- a/spec/lib/gitlab/ci/templates/MATLAB_spec.rb +++ b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb @@ -20,7 +20,7 @@ RSpec.describe 'MATLAB.gitlab-ci.yml' do end it 'creates all jobs' do - expect(build_names).to include('command', 'test', 'test_artifacts') + expect(build_names).to include('command', 'test', 'test_artifacts', 'build') end end end diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb index d65b6fb41f6..9439d29aa11 100644 --- a/spec/lib/gitlab/ci/trace/stream_spec.rb +++ b/spec/lib/gitlab/ci/trace/stream_spec.rb @@ -243,6 +243,56 @@ RSpec.describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do expect(result.encoding).to eq(Encoding.default_external) end end + + context 'limit max size' do + before do + # specifying BUFFER_SIZE forces to seek backwards + allow(described_class).to receive(:BUFFER_SIZE) + .and_return(2) + end + + it 'returns every lines with respect of the size' do + all_lines = lines.join + max_size = all_lines.bytesize.div(2) + result = stream.raw(max_size: max_size) + + expect(result.bytes).to eq(all_lines.bytes[-max_size..]) + expect(result.lines.count).to be > 1 + expect(result.encoding).to eq(Encoding.default_external) + end + + it 'returns everything if trying to get too many bytes' do + all_lines = lines.join + result = stream.raw(max_size: all_lines.bytesize * 2) + + expect(result).to eq(all_lines) + expect(result.encoding).to eq(Encoding.default_external) + end + end + + context 'limit max lines and max size' do + before do + # specifying BUFFER_SIZE forces to seek backwards + allow(described_class).to receive(:BUFFER_SIZE) + .and_return(2) + end + + it 'returns max lines if max size is greater' do + result = stream.raw(last_lines: 2, max_size: lines.join.bytesize * 2) + + expect(result).to eq(lines.last(2).join) + expect(result.encoding).to eq(Encoding.default_external) + end + + it 'returns max size if max lines is greater' do + all_lines = lines.join + max_size = all_lines.bytesize.div(2) + result = stream.raw(last_lines: lines.size * 2, max_size: max_size) + + expect(result.bytes).to eq(all_lines.bytes[-max_size..]) + expect(result.encoding).to eq(Encoding.default_external) + end + end end let(:path) { __FILE__ } diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb index 0880c556523..860a1fd30bd 100644 --- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb +++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb @@ -108,12 +108,17 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url, 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s, 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => '', + 'CI_MERGE_REQUEST_SOURCE_BRANCH_PROTECTED' => ProtectedBranch.protected?( + merge_request.source_project, + merge_request.source_branch + ).to_s, 'CI_MERGE_REQUEST_TITLE' => merge_request.title, 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list, 'CI_MERGE_REQUEST_MILESTONE' => milestone.title, 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','), 'CI_MERGE_REQUEST_EVENT_TYPE' => 'detached', - 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true)) + 'CI_OPEN_MERGE_REQUESTS' => 
merge_request.to_reference(full: true),
+        'CI_MERGE_REQUEST_SQUASH_ON_MERGE' => merge_request.squash_on_merge?.to_s)
       end
 
       it 'exposes diff variables' do
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 3411426fcdb..af745c75f42 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -10,18 +10,27 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
   let_it_be(:user) { create(:user) }
   let_it_be_with_reload(:job) do
     create(:ci_build,
+      :with_deployment,
       name: 'rspec:test 1',
       pipeline: pipeline,
       user: user,
       yaml_variables: [{ key: 'YAML_VARIABLE', value: 'value' }],
-      environment: 'test'
+      environment: 'review/$CI_COMMIT_REF_NAME',
+      options: {
+        environment: {
+          name: 'review/$CI_COMMIT_REF_NAME',
+          action: 'prepare',
+          deployment_tier: 'testing',
+          url: 'https://gitlab.com'
+        }
+      }
     )
   end
 
   let(:builder) { described_class.new(pipeline) }
 
   describe '#scoped_variables' do
-    let(:environment) { job.expanded_environment_name }
+    let(:environment_name) { job.expanded_environment_name }
     let(:dependencies) { true }
     let(:predefined_variables) do
       [
@@ -34,7 +43,13 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
         { key: 'CI_NODE_TOTAL',
           value: '1' },
         { key: 'CI_ENVIRONMENT_NAME',
-          value: 'test' },
+          value: 'review/master' },
+        { key: 'CI_ENVIRONMENT_ACTION',
+          value: 'prepare' },
+        { key: 'CI_ENVIRONMENT_TIER',
+          value: 'testing' },
+        { key: 'CI_ENVIRONMENT_URL',
+          value: 'https://gitlab.com' },
         { key: 'CI',
           value: 'true' },
         { key: 'GITLAB_CI',
@@ -150,7 +165,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
       ].map { |var| var.merge(public: true, masked: false) }
     end
 
-    subject { builder.scoped_variables(job, environment: environment, dependencies: dependencies) }
+    subject { builder.scoped_variables(job, environment: environment_name, dependencies: dependencies) }
 
     it { is_expected.to be_instance_of(Gitlab::Ci::Variables::Collection) }
 
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index f8f1d71e773..c09c0b31e97 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -794,6 +794,28 @@ module Gitlab
 
           it_behaves_like 'returns errors', 'test_job_1 has the following needs duplicated: test_job_2.'
end + + context 'when needed job name is too long' do + let(:job_name) { 'a' * (::Ci::BuildNeed::MAX_JOB_NAME_LENGTH + 1) } + + let(:config) do + <<-EOYML + lint_job: + script: 'echo lint_job' + rules: + - if: $var == null + needs: [#{job_name}] + #{job_name}: + script: 'echo job' + EOYML + end + + it 'returns an error' do + expect(subject.errors).to include( + "lint_job job: need `#{job_name}` name is too long (maximum is #{::Ci::BuildNeed::MAX_JOB_NAME_LENGTH} characters)" + ) + end + end end context 'rule needs as hash' do @@ -2020,6 +2042,52 @@ module Gitlab end end + describe 'id_tokens' do + subject(:execute) { described_class.new(config).execute } + + let(:build) { execute.builds.first } + let(:id_tokens_vars) { { ID_TOKEN_1: { aud: 'http://gcp.com' } } } + let(:job_id_tokens_vars) { { ID_TOKEN_2: { aud: 'http://job.com' } } } + + context 'when defined on job level' do + let(:config) do + YAML.dump({ + rspec: { script: 'rspec', id_tokens: id_tokens_vars } + }) + end + + it 'returns defined id_tokens' do + expect(build[:id_tokens]).to eq(id_tokens_vars) + end + end + + context 'when defined as default' do + let(:config) do + YAML.dump({ + default: { id_tokens: id_tokens_vars }, + rspec: { script: 'rspec' } + }) + end + + it 'returns inherited by default id_tokens' do + expect(build[:id_tokens]).to eq(id_tokens_vars) + end + end + + context 'when defined as default and on job level' do + let(:config) do + YAML.dump({ + default: { id_tokens: id_tokens_vars }, + rspec: { script: 'rspec', id_tokens: job_id_tokens_vars } + }) + end + + it 'overrides default and returns defined on job level' do + expect(build[:id_tokens]).to eq(job_id_tokens_vars) + end + end + end + describe "Artifacts" do it "returns artifacts when defined" do config = YAML.dump( @@ -2553,6 +2621,60 @@ module Gitlab scheduling_type: :dag ) end + + context 'when expanded job name is too long' do + let(:parallel_job_name) { 'a' * ::Ci::BuildNeed::MAX_JOB_NAME_LENGTH } + let(:needs) { [parallel_job_name] } + + before do + config[parallel_job_name] = { stage: 'build', script: 'test', parallel: 1 } + end + + it 'returns an error' do + expect(subject.errors).to include( + "test1 job: need `#{parallel_job_name} 1/1` name is too long (maximum is #{::Ci::BuildNeed::MAX_JOB_NAME_LENGTH} characters)" + ) + end + end + + context 'when parallel job has matrix specified' do + let(:var1) { '1' } + let(:var2) { '2' } + + before do + config[:parallel] = { stage: 'build', script: 'test', parallel: { matrix: [{ VAR1: var1, VAR2: var2 }] } } + end + + it 'does create jobs with valid specification' do + expect(subject.builds.size).to eq(6) + expect(subject.builds[3]).to eq( + stage: 'test', + stage_idx: 2, + name: 'test1', + only: { refs: %w[branches tags] }, + options: { script: ['test'] }, + needs_attributes: [ + { name: 'parallel: [1, 2]', artifacts: true, optional: false } + ], + when: "on_success", + allow_failure: false, + job_variables: [], + root_variables_inheritance: true, + scheduling_type: :dag + ) + end + + context 'when expanded job name is too long' do + let(:var1) { '1' * (::Ci::BuildNeed::MAX_JOB_NAME_LENGTH / 2) } + let(:var2) { '2' * (::Ci::BuildNeed::MAX_JOB_NAME_LENGTH / 2) } + + it 'returns an error' do + expect(subject.errors).to include( + "test1 job: need `parallel: [#{var1}, #{var2}]` name is too long (maximum is #{::Ci::BuildNeed::MAX_JOB_NAME_LENGTH} characters)" + ) + end + end + end end context 'needs dependencies artifacts' do diff --git a/spec/lib/gitlab/composer/version_index_spec.rb 
b/spec/lib/gitlab/composer/version_index_spec.rb index a4d016636aa..63efa8cae95 100644 --- a/spec/lib/gitlab/composer/version_index_spec.rb +++ b/spec/lib/gitlab/composer/version_index_spec.rb @@ -2,52 +2,111 @@ require 'spec_helper' -RSpec.describe Gitlab::Composer::VersionIndex do +RSpec.describe Gitlab::Composer::VersionIndex, feature_category: :package_registry do let_it_be(:package_name) { 'sample-project' } let_it_be(:json) { { 'name' => package_name } } let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) } + let_it_be(:files) { { 'composer.json' => json.to_json } } + let_it_be_with_reload(:project) { create(:project, :public, :custom_repo, files: files, group: group) } let_it_be(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) } let_it_be(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) } + let(:url) { "http://localhost/#{group.path}/#{project.path}.git" } let(:branch) { project.repository.find_branch('master') } - let(:packages) { [package1, package2] } describe '#as_json' do + let(:index) { described_class.new(packages).as_json } + let(:ssh_path_prefix) { 'username@localhost:' } + subject(:package_index) { index['packages'][package_name] } - let(:index) { described_class.new(packages).as_json } + before do + allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix) + .and_return(ssh_path_prefix) + end + + shared_examples 'returns the packages json' do + def expected_json(package) + { + 'dist' => { + 'reference' => branch.target, + 'shasum' => '', + 'type' => 'zip', + 'url' => "http://localhost/api/v4/projects/#{project.id}/packages/composer/archives/#{package.name}.zip?sha=#{branch.target}" + }, + 'source' => { + 'reference' => branch.target, + 'type' => 'git', + 'url' => url + }, + 'name' => package.name, + 'uid' => package.id, + 'version' => package.version + } + end + + it 'returns the packages json' do + expect(package_index['1.0.0']).to eq(expected_json(package1)) + expect(package_index['2.0.0']).to eq(expected_json(package2)) + end + + context 'with an unordered list of packages' do + let(:packages) { [package2, package1] } + + it 'returns the packages sorted by version' do + expect(package_index.keys).to eq ['1.0.0', '2.0.0'] + end + end + end + + context 'with a public project' do + it_behaves_like 'returns the packages json' + end + + context 'with an internal project' do + let(:url) { "#{ssh_path_prefix}#{group.path}/#{project.path}.git" } + + before do + project.update!(visibility: Gitlab::VisibilityLevel::INTERNAL) + end - def expected_json(package) - { - 'dist' => { - 'reference' => branch.target, - 'shasum' => '', - 'type' => 'zip', - 'url' => "http://localhost/api/v4/projects/#{project.id}/packages/composer/archives/#{package.name}.zip?sha=#{branch.target}" - }, - 'source' => { - 'reference' => branch.target, - 'type' => 'git', - 'url' => project.http_url_to_repo - }, - 'name' => package.name, - 'uid' => package.id, - 'version' => package.version - } + it_behaves_like 'returns the packages json' end - it 'returns the packages json' do - expect(package_index['1.0.0']).to eq(expected_json(package1)) - expect(package_index['2.0.0']).to eq(expected_json(package2)) + context 'with a private project' do + let(:url) { "#{ssh_path_prefix}#{group.path}/#{project.path}.git" } + + before do + project.update!(visibility: 
Gitlab::VisibilityLevel::PRIVATE) + end + + it_behaves_like 'returns the packages json' end - context 'with an unordered list of packages' do - let(:packages) { [package2, package1] } + context 'with composer_use_ssh_source_urls disabled' do + before do + stub_feature_flags(composer_use_ssh_source_urls: false) + end + + context 'with a public project' do + it_behaves_like 'returns the packages json' + end + + context 'with an internal project' do + before do + project.update!(visibility: Gitlab::VisibilityLevel::INTERNAL) + end + + it_behaves_like 'returns the packages json' + end + + context 'with a private project' do + before do + project.update!(visibility: Gitlab::VisibilityLevel::PRIVATE) + end - it 'returns the packages sorted by version' do - expect(package_index.keys).to eq ['1.0.0', '2.0.0'] + it_behaves_like 'returns the packages json' end end end diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index 6d24ced138e..3682a654181 100644 --- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -80,6 +80,10 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader, feature_category: :s let(:style_src) { directives['style_src'] } let(:worker_src) { directives['worker_src'] } + before do + stub_env('GITLAB_ANALYTICS_URL', nil) + end + it 'returns default directives' do directive_names = (described_class::DIRECTIVES - ['report_uri']) directive_names.each do |directive| @@ -542,6 +546,58 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader, feature_category: :s end end end + + describe 'browsersdk_tracking' do + let(:analytics_url) { 'https://analytics.gitlab.com' } + let(:is_gitlab_com) { true } + + before do + allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) + end + + context 'when browsersdk_tracking is enabled, GITLAB_ANALYTICS_URL is set, and Gitlab.com? is true' do + before do + stub_env('GITLAB_ANALYTICS_URL', analytics_url) + end + + it 'adds GITLAB_ANALYTICS_URL to connect-src' do + expect(connect_src).to include(analytics_url) + end + end + + context 'when Gitlab.com? 
is false' do + let(:is_gitlab_com) { false } + + before do + stub_env('GITLAB_ANALYTICS_URL', analytics_url) + end + + it 'does not add GITLAB_ANALYTICS_URL to connect-src' do + expect(connect_src).not_to include(analytics_url) + end + end + + context 'when browsersdk_tracking is disabled' do + before do + stub_feature_flags(browsersdk_tracking: false) + stub_env('GITLAB_ANALYTICS_URL', analytics_url) + end + + it 'does not add GITLAB_ANALYTICS_URL to connect-src' do + expect(connect_src).not_to include(analytics_url) + end + end + + context 'when GITLAB_ANALYTICS_URL is not set' do + before do + stub_env('GITLAB_ANALYTICS_URL', nil) + end + + it 'does not add GITLAB_ANALYTICS_URL to connect-src' do + expect(connect_src).not_to include(analytics_url) + end + end + end end describe '#load' do diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb index 0e93a85764f..df1b12e479f 100644 --- a/spec/lib/gitlab/current_settings_spec.rb +++ b/spec/lib/gitlab/current_settings_spec.rb @@ -2,15 +2,14 @@ require 'spec_helper' -RSpec.describe Gitlab::CurrentSettings do +RSpec.describe Gitlab::CurrentSettings, feature_category: :shared do before do stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') end shared_context 'with settings in cache' do before do - create(:application_setting) - described_class.current_application_settings # warm the cache + 2.times { described_class.current_application_settings } # warm the cache end end @@ -29,7 +28,7 @@ RSpec.describe Gitlab::CurrentSettings do context 'when there are allowed domains' do before do - create(:application_setting, domain_allowlist: ['www.gitlab.com']) + stub_application_setting(domain_allowlist: ['www.gitlab.com']) end it { is_expected.to be_truthy } @@ -37,7 +36,7 @@ RSpec.describe Gitlab::CurrentSettings do context 'when there are email restrictions' do before do - create(:application_setting, email_restrictions_enabled: true) + stub_application_setting(email_restrictions_enabled: true) end it { is_expected.to be_truthy } @@ -45,7 +44,7 @@ RSpec.describe Gitlab::CurrentSettings do context 'when the admin has to approve signups' do before do - create(:application_setting, require_admin_approval_after_user_signup: true) + stub_application_setting(require_admin_approval_after_user_signup: true) end it { is_expected.to be_truthy } @@ -53,7 +52,7 @@ RSpec.describe Gitlab::CurrentSettings do context 'when new users are set to external' do before do - create(:application_setting, user_default_external: true) + stub_application_setting(user_default_external: true) end it { is_expected.to be_truthy } @@ -61,7 +60,7 @@ RSpec.describe Gitlab::CurrentSettings do context 'when there are no restrictions' do before do - create(:application_setting, domain_allowlist: [], email_restrictions_enabled: false, require_admin_approval_after_user_signup: false, user_default_external: false) + stub_application_setting(domain_allowlist: [], email_restrictions_enabled: false, require_admin_approval_after_user_signup: false, user_default_external: false) end it { is_expected.to be_falsey } @@ -73,7 +72,7 @@ RSpec.describe Gitlab::CurrentSettings do context 'when signup is enabled' do before do - create(:application_setting, signup_enabled: true) + stub_application_setting(signup_enabled: true) end it { is_expected.to be_falsey } @@ -81,7 +80,7 @@ RSpec.describe Gitlab::CurrentSettings do context 'when signup is disabled' do before do - create(:application_setting, signup_enabled: false) + stub_application_setting(signup_enabled: 
false) end it { is_expected.to be_truthy } @@ -90,11 +89,9 @@ RSpec.describe Gitlab::CurrentSettings do describe '#current_application_settings', :use_clean_rails_memory_store_caching do it 'allows keys to be called directly' do - db_settings = create(:application_setting, - home_page_url: 'http://mydomain.com', - signup_enabled: false) + described_class.update!(home_page_url: 'http://mydomain.com', signup_enabled: false) - expect(described_class.home_page_url).to eq(db_settings.home_page_url) + expect(described_class.home_page_url).to eq('http://mydomain.com') expect(described_class.signup_enabled?).to be_falsey expect(described_class.signup_enabled).to be_falsey expect(described_class.metrics_sample_interval).to be(15) @@ -253,12 +250,14 @@ RSpec.describe Gitlab::CurrentSettings do end context 'with an existing ApplicationSetting DB record' do - let!(:db_settings) { ApplicationSetting.build_from_defaults(home_page_url: 'http://mydomain.com').save! && ApplicationSetting.last } + before do + described_class.update!(home_page_url: 'http://mydomain.com') + end it_behaves_like 'a non-persisted ApplicationSetting object' it 'uses the value from the DB attribute if present and not overridden by an accessor' do - expect(current_settings.home_page_url).to eq(db_settings.home_page_url) + expect(current_settings.home_page_url).to eq('http://mydomain.com') end end end @@ -277,10 +276,11 @@ RSpec.describe Gitlab::CurrentSettings do describe '#current_application_settings?', :use_clean_rails_memory_store_caching do before do allow(described_class).to receive(:current_application_settings?).and_call_original + ApplicationSetting.delete_all # ensure no settings exist end it 'returns true when settings exist' do - create(:application_setting, + described_class.update!( home_page_url: 'http://mydomain.com', signup_enabled: false) diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb index bbcfa1973ea..bf97f40e97f 100644 --- a/spec/lib/gitlab/data_builder/deployment_spec.rb +++ b/spec/lib/gitlab/data_builder/deployment_spec.rb @@ -50,6 +50,15 @@ RSpec.describe Gitlab::DataBuilder::Deployment, feature_category: :continuous_de expect(data[:deployable_url]).to be_nil end + it 'does not include the deployable URL when deployable is bridge' do + project = create(:project, :repository) + bridge = create(:ci_bridge, project: project) + deployment = create(:deployment, status: :failed, project: project, deployable: bridge) + data = described_class.build(deployment, 'failed', Time.current) + + expect(data[:deployable_url]).to be_nil + end + context 'when commit does not exist in the repository' do let_it_be(:project) { create(:project, :repository) } let_it_be(:deployment) { create(:deployment, project: project) } diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb index 309bbf1e3f0..c5a20b5ef3d 100644 --- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb @@ -141,6 +141,14 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers, feature_categor expect { migration.prepare_async_index(table_name, 'id') }.not_to raise_error end end + + context 'when the target table does not exist' do + it 'raises an error' do + expect { migration.prepare_async_index(:non_existent_table, 'id') }.to( + raise_error("Table non_existent_table does not exist") + ) + end + end end describe 
'#prepare_async_index_from_sql' do diff --git a/spec/lib/gitlab/database/click_house_client_spec.rb b/spec/lib/gitlab/database/click_house_client_spec.rb index 50086795b2b..6e63ae56557 100644 --- a/spec/lib/gitlab/database/click_house_client_spec.rb +++ b/spec/lib/gitlab/database/click_house_client_spec.rb @@ -2,98 +2,135 @@ require 'spec_helper' -RSpec.describe 'ClickHouse::Client', feature_category: :database do - context 'when click_house spec tag is not added' do - it 'does not have any ClickHouse databases configured' do - databases = ClickHouse::Client.configuration.databases +RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database do + it 'has a ClickHouse database configured' do + databases = ClickHouse::Client.configuration.databases - expect(databases).to be_empty - end + expect(databases).not_to be_empty end - describe 'when click_house spec tag is added', :click_house do - it 'has a ClickHouse database configured' do - databases = ClickHouse::Client.configuration.databases - - expect(databases).not_to be_empty - end + it 'does not return data via `execute` method' do + result = ClickHouse::Client.execute("SELECT 1 AS value", :main) - it 'does not return data via `execute` method' do - result = ClickHouse::Client.execute("SELECT 1 AS value", :main) + # does not return data, just true if successful. Otherwise error. + expect(result).to eq(true) + end - # does not return data, just true if successful. Otherwise error. - expect(result).to eq(true) - end + describe 'data manipulation' do + describe 'inserting' do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project) } + + let_it_be(:author1) { create(:user).tap { |u| project.add_developer(u) } } + let_it_be(:author2) { create(:user).tap { |u| project.add_developer(u) } } + + let_it_be(:issue1) { create(:issue, project: project) } + let_it_be(:issue2) { create(:issue, project: project) } + let_it_be(:merge_request) { create(:merge_request, source_project: project) } + + let_it_be(:event1) { create(:event, :created, target: issue1, author: author1) } + let_it_be(:event2) { create(:event, :closed, target: issue2, author: author2) } + let_it_be(:event3) { create(:event, :merged, target: merge_request, author: author1) } + + let(:events) { [event1, event2, event3] } + + def format_row(event) + path = event.project.reload.project_namespace.traversal_ids.join('/') + + action = Event.actions[event.action] + [ + event.id, + "'#{path}/'", + event.author_id, + event.target_id, + "'#{event.target_type}'", + action, + event.created_at.to_f, + event.updated_at.to_f + ].join(',') + end - describe 'data manipulation' do - describe 'inserting' do - let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project) } - - let_it_be(:author1) { create(:user).tap { |u| project.add_developer(u) } } - let_it_be(:author2) { create(:user).tap { |u| project.add_developer(u) } } - - let_it_be(:issue1) { create(:issue, project: project) } - let_it_be(:issue2) { create(:issue, project: project) } - let_it_be(:merge_request) { create(:merge_request, source_project: project) } - - let_it_be(:event1) { create(:event, :created, target: issue1, author: author1) } - let_it_be(:event2) { create(:event, :closed, target: issue2, author: author2) } - let_it_be(:event3) { create(:event, :merged, target: merge_request, author: author1) } - - let(:events) { [event1, event2, event3] } - - def format_row(event) - path = event.project.reload.project_namespace.traversal_ids.join('/') - - action = 
Event.actions[event.action] - [ - event.id, - "'#{path}/'", - event.author_id, - event.target_id, - "'#{event.target_type}'", - action, - event.created_at.to_f, - event.updated_at.to_f - ].join(',') + describe 'RSpec hooks' do + it 'ensures that tables are empty' do + results = ClickHouse::Client.select('SELECT * FROM events', :main) + expect(results).to be_empty end - describe 'RSpec hooks' do - it 'ensures that tables are empty' do - results = ClickHouse::Client.select('SELECT * FROM events', :main) - expect(results).to be_empty + it 'inserts data from CSV' do + time = Time.current.utc + Tempfile.open(['test', '.csv.gz']) do |f| + csv = "id,path,created_at\n10,1/2/,#{time.to_f}\n20,1/,#{time.to_f}" + File.binwrite(f.path, ActiveSupport::Gzip.compress(csv)) + + ClickHouse::Client.insert_csv('INSERT INTO events (id, path, created_at) FORMAT CSV', File.open(f.path), + :main) end - end - it 'inserts and modifies data' do - insert_query = <<~SQL - INSERT INTO events - (id, path, author_id, target_id, target_type, action, created_at, updated_at) - VALUES - (#{format_row(event1)}), - (#{format_row(event2)}), - (#{format_row(event3)}) - SQL + results = ClickHouse::Client.select('SELECT id, path, created_at FROM events ORDER BY id', :main) - ClickHouse::Client.execute(insert_query, :main) + expect(results).to match([ + { 'id' => 10, 'path' => '1/2/', 'created_at' => be_within(0.1.seconds).of(time) }, + { 'id' => 20, 'path' => '1/', 'created_at' => be_within(0.1.seconds).of(time) } + ]) + end + end - results = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main) - expect(results.size).to eq(3) + it 'inserts and modifies data' do + insert_query = <<~SQL + INSERT INTO events + (id, path, author_id, target_id, target_type, action, created_at, updated_at) + VALUES + (#{format_row(event1)}), + (#{format_row(event2)}), + (#{format_row(event3)}) + SQL + + ClickHouse::Client.execute(insert_query, :main) + + results = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main) + expect(results.size).to eq(3) + + last = results.last + expect(last).to match(a_hash_including( + 'id' => event3.id, + 'author_id' => event3.author_id, + 'created_at' => be_within(0.05).of(event3.created_at), + 'target_type' => event3.target_type + )) + + delete_query = ClickHouse::Client::Query.new( + raw_query: 'DELETE FROM events WHERE id = {id:UInt64}', + placeholders: { id: event3.id } + ) + + ClickHouse::Client.execute(delete_query, :main) + + select_query = ClickHouse::Client::Query.new( + raw_query: 'SELECT * FROM events WHERE id = {id:UInt64}', + placeholders: { id: event3.id } + ) + + results = ClickHouse::Client.select(select_query, :main) + expect(results).to be_empty + end + end + end - last = results.last - expect(last).to match(a_hash_including( - 'id' => event3.id, - 'author_id' => event3.author_id, - 'created_at' => be_within(0.05).of(event3.created_at), - 'target_type' => event3.target_type - )) + describe 'logging' do + let(:query_string) { "SELECT * FROM events WHERE id IN (4, 5, 6)" } - ClickHouse::Client.execute("DELETE FROM events WHERE id = #{event3.id}", :main) + context 'on dev and test environments' do + it 'logs the un-redacted query' do + expect(ClickHouse::Client.configuration.logger).to receive(:info).with({ + query: query_string, + correlation_id: a_kind_of(String) + }) - results = ClickHouse::Client.select("SELECT * FROM events WHERE id = #{event3.id}", :main) - expect(results).to be_empty - end + ClickHouse::Client.select(query_string, :main) + end + + it 'has a ClickHouse 
logger' do + expect(ClickHouse::Client.configuration.logger).to be_a(ClickHouse::Logger) end end end diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb index 14ff1a462e3..e402014df90 100644 --- a/spec/lib/gitlab/database/gitlab_schema_spec.rb +++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb @@ -148,7 +148,7 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do subject { described_class.table_schemas!(tables) } it 'returns the matched schemas' do - expect(subject).to match_array %i[gitlab_main_cell gitlab_main gitlab_ci].to_set + expect(subject).to match_array %i[gitlab_main_cell gitlab_ci].to_set end context 'when one of the tables does not have a matching table schema' do diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb index 5ef6d9173c4..89cecaff075 100644 --- a/spec/lib/gitlab/database/load_balancing/host_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing::Host do +RSpec.describe Gitlab::Database::LoadBalancing::Host, feature_category: :database do let(:load_balancer) do Gitlab::Database::LoadBalancing::LoadBalancer .new(Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)) @@ -124,13 +124,36 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do end it 'refreshes the status' do - expect(Gitlab::Database::LoadBalancing::Logger).to receive(:info) - .with(hash_including(event: :host_online)) - .and_call_original - expect(host).to be_online end + context 'and the host was previously online' do + # Hosts are online by default + + it 'does not log the online event' do + expect(Gitlab::Database::LoadBalancing::Logger) + .not_to receive(:info) + .with(hash_including(event: :host_online)) + + expect(host).to be_online + end + end + + context 'and the host was previously offline' do + before do + host.offline! 
+ end + + it 'logs the online event' do + expect(Gitlab::Database::LoadBalancing::Logger) + .to receive(:info) + .with(hash_including(event: :host_online)) + .and_call_original + + expect(host).to be_online + end + end + context 'and replica is not up to date' do before do expect(host).to receive(:replica_is_up_to_date?).and_return(false) diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb index 26c8969efd8..c975f5b5ee4 100644 --- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb @@ -469,25 +469,58 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store, fe context 'when none of the replicas are caught up' do before do - expect(hosts).to all(receive(:caught_up?).with(location).and_return(false)) + expect(hosts[0]).to receive(:caught_up?).with(location).and_return(false) + expect(hosts[1]).to receive(:caught_up?).with(location).and_return(false) end - it 'returns false and does not update the host thread-local variable' do - expect(subject).to be false + it 'returns NONE_CAUGHT_UP and does not update the host thread-local variable' do + expect(subject).to eq(described_class::NONE_CAUGHT_UP) expect(set_host).to be_nil end + + it 'notifies caught_up_replica_pick.load_balancing with result false' do + expect(ActiveSupport::Notifications).to receive(:instrument) + .with('caught_up_replica_pick.load_balancing', { result: false }) + + subject + end end - context 'when any of the replicas is caught up' do + context 'when any replica is caught up' do before do - # `allow` for non-caught up host, because we may not even check it, if will find the caught up one earlier - allow(hosts[0]).to receive(:caught_up?).with(location).and_return(false) + expect(hosts[0]).to receive(:caught_up?).with(location).and_return(true) + expect(hosts[1]).to receive(:caught_up?).with(location).and_return(false) + end + + it 'returns ANY_CAUGHT_UP and sets host thread-local variable' do + expect(subject).to eq(described_class::ANY_CAUGHT_UP) + expect(set_host).to eq(hosts[0]) + end + + it 'notifies caught_up_replica_pick.load_balancing with result true' do + expect(ActiveSupport::Notifications).to receive(:instrument) + .with('caught_up_replica_pick.load_balancing', { result: true }) + + subject + end + end + + context 'when all of the replicas is caught up' do + before do + expect(hosts[0]).to receive(:caught_up?).with(location).and_return(true) expect(hosts[1]).to receive(:caught_up?).with(location).and_return(true) end - it 'returns true and sets host thread-local variable' do - expect(subject).to be true - expect(set_host).to eq(hosts[1]) + it 'returns ALL_CAUGHT_UP and sets host thread-local variable' do + expect(subject).to eq(described_class::ALL_CAUGHT_UP) + expect(set_host).to be_in([hosts[0], hosts[1]]) + end + + it 'notifies caught_up_replica_pick.load_balancing with result true' do + expect(ActiveSupport::Notifications).to receive(:instrument) + .with('caught_up_replica_pick.load_balancing', { result: true }) + + subject end end end diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb index 713bff5feea..863b1fb099b 100644 --- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb @@ -6,7 +6,7 @@ RSpec.describe 
Gitlab::Database::LoadBalancing::RackMiddleware, :redis do let(:app) { double(:app) } let(:middleware) { described_class.new(app) } let(:warden_user) { double(:warden, user: double(:user, id: 42)) } - let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 42]]) } + let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 99]]) } let(:multiple_sticking_objects) do Set.new([ [ActiveRecord::Base.sticking, :user, 42], @@ -25,7 +25,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do expect(middleware).to receive(:clear).twice - expect(middleware).to receive(:unstick_or_continue_sticking).with(env) + expect(middleware).to receive(:find_caught_up_replica).with(env) expect(middleware).to receive(:stick_if_necessary).with(env) expect(app).to receive(:call).with(env).and_return(10) @@ -41,12 +41,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do end end - describe '#unstick_or_continue_sticking' do + describe '#find_caught_up_replica' do it 'does not stick if no namespace and identifier could be found' do expect(ApplicationRecord.sticking) - .not_to receive(:unstick_or_continue_sticking) + .not_to receive(:find_caught_up_replica) - middleware.unstick_or_continue_sticking({}) + middleware.find_caught_up_replica({}) end it 'sticks to the primary if a warden user is found' do @@ -54,94 +54,125 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do Gitlab::Database::LoadBalancing.base_models.each do |model| expect(model.sticking) - .to receive(:unstick_or_continue_sticking) + .to receive(:find_caught_up_replica) .with(:user, 42) end - middleware.unstick_or_continue_sticking(env) + middleware.find_caught_up_replica(env) end it 'sticks to the primary if a sticking namespace and identifier is found' do env = { described_class::STICK_OBJECT => single_sticking_object } expect(ApplicationRecord.sticking) - .to receive(:unstick_or_continue_sticking) - .with(:user, 42) + .to receive(:find_caught_up_replica) + .with(:user, 99) - middleware.unstick_or_continue_sticking(env) + middleware.find_caught_up_replica(env) end it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do env = { described_class::STICK_OBJECT => multiple_sticking_objects } expect(ApplicationRecord.sticking) - .to receive(:unstick_or_continue_sticking) + .to receive(:find_caught_up_replica) .with(:user, 42) .ordered expect(ApplicationRecord.sticking) - .to receive(:unstick_or_continue_sticking) + .to receive(:find_caught_up_replica) .with(:runner, '123456789') .ordered expect(ApplicationRecord.sticking) - .to receive(:unstick_or_continue_sticking) + .to receive(:find_caught_up_replica) .with(:runner, '1234') .ordered - middleware.unstick_or_continue_sticking(env) + middleware.find_caught_up_replica(env) end end describe '#stick_if_necessary' do - it 'does not stick to the primary if not necessary' do - expect(ApplicationRecord.sticking) - .not_to receive(:stick_if_necessary) - - middleware.stick_if_necessary({}) + let(:env) { { 'warden' => warden, described_class::STICK_OBJECT => stick_object }.compact } + let(:stick_object) { nil } + let(:write_performed) { true } + let(:warden) { warden_user } + + before do + allow(::Gitlab::Database::LoadBalancing::Session.current).to receive(:performed_write?) 
+ .and_return(write_performed) end - it 'sticks to the primary if a warden user is found' do - env = { 'warden' => warden_user } + subject { middleware.stick_if_necessary(env) } + it 'sticks to the primary for the user' do Gitlab::Database::LoadBalancing.base_models.each do |model| expect(model.sticking) - .to receive(:stick_if_necessary) + .to receive(:stick) .with(:user, 42) end - middleware.stick_if_necessary(env) + subject end - it 'sticks to the primary if a a single sticking object is found' do - env = { described_class::STICK_OBJECT => single_sticking_object } + context 'when no write was performed' do + let(:write_performed) { false } - expect(ApplicationRecord.sticking) - .to receive(:stick_if_necessary) - .with(:user, 42) + it 'does not stick to the primary' do + expect(ApplicationRecord.sticking) + .not_to receive(:stick) - middleware.stick_if_necessary(env) + subject + end end - it 'sticks to the primary if multiple sticking namespaces and identifiers were found' do - env = { described_class::STICK_OBJECT => multiple_sticking_objects } + context 'when there is no user in the env' do + let(:warden) { nil } - expect(ApplicationRecord.sticking) - .to receive(:stick_if_necessary) - .with(:user, 42) - .ordered + context 'when there is an explicit single sticking object in the env' do + let(:stick_object) { single_sticking_object } - expect(ApplicationRecord.sticking) - .to receive(:stick_if_necessary) - .with(:runner, '123456789') - .ordered + it 'sticks to the single sticking object' do + expect(ApplicationRecord.sticking) + .to receive(:stick) + .with(:user, 99) - expect(ApplicationRecord.sticking) - .to receive(:stick_if_necessary) - .with(:runner, '1234') - .ordered + subject + end + end + + context 'when there is multiple explicit sticking objects' do + let(:stick_object) { multiple_sticking_objects } + + it 'sticks to the sticking objects' do + expect(ApplicationRecord.sticking) + .to receive(:stick) + .with(:user, 42) + .ordered - middleware.stick_if_necessary(env) + expect(ApplicationRecord.sticking) + .to receive(:stick) + .with(:runner, '123456789') + .ordered + + expect(ApplicationRecord.sticking) + .to receive(:stick) + .with(:runner, '1234') + .ordered + + subject + end + end + + context 'when there no explicit sticking objects' do + it 'does not stick to the primary' do + expect(ApplicationRecord.sticking) + .not_to receive(:stick) + + subject + end + end end end diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb index 789919d2a51..7197b99fe33 100644 --- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb @@ -15,7 +15,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego load_balancer, nameserver: 'localhost', port: 8600, - record: 'foo' + record: 'foo', + disconnect_timeout: 1 # Short disconnect timeout to keep tests fast ) end @@ -192,6 +193,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego end describe '#replace_hosts' do + before do + stub_env('LOAD_BALANCER_PARALLEL_DISCONNECT', 'true') + allow(service) + .to receive(:load_balancer) + .and_return(load_balancer) + end + let(:address_foo) { described_class::Address.new('foo') } let(:address_bar) { described_class::Address.new('bar') } @@ -202,19 +210,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego ) end - before do - 
allow(service) - .to receive(:load_balancer) - .and_return(load_balancer) - end - it 'replaces the hosts of the load balancer' do service.replace_hosts([address_bar]) expect(load_balancer.host_list.host_names_and_ports).to eq([['bar', nil]]) end - it 'disconnects the old connections' do + it 'disconnects the old connections gracefully if possible' do host = load_balancer.host_list.hosts.first allow(service) @@ -222,11 +224,59 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego .and_return(2) expect(host) - .to receive(:disconnect!) - .with(timeout: 2) + .to receive(:try_disconnect).and_return(true) + + expect(host).not_to receive(:force_disconnect!) service.replace_hosts([address_bar]) end + + it 'disconnects the old connections forcefully if necessary' do + host = load_balancer.host_list.hosts.first + + allow(service) + .to receive(:disconnect_timeout) + .and_return(2) + + expect(host) + .to receive(:try_disconnect).and_return(false) + + expect(host).to receive(:force_disconnect!) + + service.replace_hosts([address_bar]) + end + + context 'without old hosts' do + before do + allow(load_balancer.host_list).to receive(:hosts).and_return([]) + end + + it 'does not log any load balancing event' do + expect(::Gitlab::Database::LoadBalancing::Logger).not_to receive(:info) + + service.replace_hosts([address_foo, address_bar]) + end + end + + context 'when LOAD_BALANCER_PARALLEL_DISCONNECT is false' do + before do + stub_env('LOAD_BALANCER_PARALLEL_DISCONNECT', 'false') + end + + it 'disconnects them sequentially' do + host = load_balancer.host_list.hosts.first + + allow(service) + .to receive(:disconnect_timeout) + .and_return(2) + + expect(host) + .to receive(:disconnect!) + .with(timeout: 2) + + service.replace_hosts([address_bar]) + end + end end describe '#addresses_from_dns' do @@ -475,4 +525,61 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_catego end end end + + context 'with service discovery connected to a real load balancer' do + let(:database_address) do + host, port = ApplicationRecord.connection_pool.db_config.configuration_hash.fetch(:host, :port) + described_class::Address.new(host, port) + end + + before do + # set up the load balancer to point to the test postgres instance with three seperate conections + allow(service).to receive(:addresses_from_dns) + .and_return([Gitlab::Database::LoadBalancing::Resolver::FAR_FUTURE_TTL, + [database_address, database_address, database_address]]) + .once + + service.perform_service_discovery + end + + it 'configures service discovery with three replicas' do + expect(service.load_balancer.host_list.hosts.count).to eq(3) + end + + it 'swaps the hosts out gracefully when not contended' do + expect(service.load_balancer.host_list.hosts.count).to eq(3) + + host = service.load_balancer.host_list.next + + # Check out and use a connection from a host so that there is something to clean up + host.pool.with_connection do |connection| + expect { connection.execute('select 1') }.not_to raise_error + end + + allow(service).to receive(:addresses_from_dns).and_return([Gitlab::Database::LoadBalancing::Resolver::FAR_FUTURE_TTL, []]) + + service.load_balancer.host_list.hosts.each do |h| + # Expect that the host gets gracefully disconnected + expect(h).not_to receive(:force_disconnect!) 
+ end + + expect { service.perform_service_discovery }.to change { host.pool.stat[:connections] }.from(1).to(0) + end + + it 'swaps the hosts out forcefully when contended' do + host = service.load_balancer.host_list.next + + # Check out a connection and leave it checked out (simulate a web request) + connection = host.pool.checkout + connection.execute('select 1') + + # Expect that the connection is forcefully checked in + expect(host).to receive(:force_disconnect!).and_call_original + expect(connection).to receive(:steal!).and_call_original + + allow(service).to receive(:addresses_from_dns).and_return([Gitlab::Database::LoadBalancing::Resolver::FAR_FUTURE_TTL, []]) + + service.perform_service_discovery + end + end end diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb index 7703b5680c2..aaca544ef80 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb @@ -8,6 +8,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ let(:location) { '0/D525E3A8' } let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_s => location } } let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } } + let(:any_caught_up) { Gitlab::Database::LoadBalancing::LoadBalancer::ANY_CAUGHT_UP } + let(:all_caught_up) { Gitlab::Database::LoadBalancing::LoadBalancer::ALL_CAUGHT_UP } + let(:none_caught_up) { Gitlab::Database::LoadBalancing::LoadBalancer::NONE_CAUGHT_UP } before do skip_feature_flags_yaml_validation @@ -67,7 +70,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ expect(ActiveRecord::Base.load_balancer) .to receive(:select_up_to_date_host) .with(location) - .and_return(true) + .and_return(any_caught_up) run_middleware do expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).not_to be_truthy @@ -86,7 +89,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ allow(lb) .to receive(:select_up_to_date_host) .with(location) - .and_return(true) + .and_return(any_caught_up) end end @@ -100,7 +103,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ allow(ActiveRecord::Base.load_balancer) .to receive(:select_up_to_date_host) .with(wal_locations[:main]) - .and_return(true) + .and_return(any_caught_up) end it_behaves_like 'replica is up to date', 'replica' @@ -133,7 +136,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ context 'when replica is up to date' do before do Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - allow(lb).to receive(:select_up_to_date_host).and_return(true) + allow(lb).to receive(:select_up_to_date_host).and_return(any_caught_up) end end @@ -147,7 +150,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ context 'when replica is not up to date' do before do Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - allow(lb).to receive(:select_up_to_date_host).and_return(false, true) + allow(lb).to receive(:select_up_to_date_host).and_return(none_caught_up, any_caught_up) end end @@ -161,7 +164,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ context 'when replica is never not up to date' do before do 
Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - allow(lb).to receive(:select_up_to_date_host).and_return(false, false) + allow(lb).to receive(:select_up_to_date_host).and_return(none_caught_up, none_caught_up) end end @@ -267,7 +270,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ context 'when replica is not up to date' do before do Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - allow(lb).to receive(:select_up_to_date_host).and_return(false) + allow(lb).to receive(:select_up_to_date_host).and_return(none_caught_up) end end @@ -282,7 +285,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ .to eq(true) end - it 'returns true when all load balancers are in sync' do + it 'returns true when all load balancers are in sync for some replicas' do locations = {} Gitlab::Database::LoadBalancing.each_load_balancer do |lb| @@ -291,7 +294,23 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ expect(lb) .to receive(:select_up_to_date_host) .with('foo') - .and_return(true) + .and_return(any_caught_up) + end + + expect(middleware.send(:databases_in_sync?, locations)) + .to eq(true) + end + + it 'returns true when all load balancers are in sync for all replicas' do + locations = {} + + Gitlab::Database::LoadBalancing.each_load_balancer do |lb| + locations[lb.name] = 'foo' + + expect(lb) + .to receive(:select_up_to_date_host) + .with('foo') + .and_return(all_caught_up) end expect(middleware.send(:databases_in_sync?, locations)) @@ -307,7 +326,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ allow(lb) .to receive(:select_up_to_date_host) .with('foo') - .and_return(false) + .and_return(none_caught_up) end expect(middleware.send(:databases_in_sync?, locations)) @@ -324,7 +343,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ allow(lb) .to receive(:select_up_to_date_host) .with('foo') - .and_return(false) + .and_return(none_caught_up) end expect(middleware.send(:databases_in_sync?, locations)) @@ -346,8 +365,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end def replication_lag!(exists) + caught_up = exists ? 
none_caught_up : all_caught_up Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - allow(lb).to receive(:select_up_to_date_host).and_return(!exists) + allow(lb).to receive(:select_up_to_date_host).and_return(caught_up) end end end diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb index ff31a5cd6cb..8c2901c3b89 100644 --- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb @@ -3,327 +3,142 @@ require 'spec_helper' RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do - let(:sticking) do - described_class.new(ActiveRecord::Base.load_balancer) - end + let(:load_balancer) { ActiveRecord::Base.load_balancer } + let(:primary_write_location) { 'the-primary-lsn' } + let(:last_write_location) { 'the-last-write-lsn' } - after do - Gitlab::Database::LoadBalancing::Session.clear_session + let(:sticking) do + described_class.new(load_balancer) end - shared_examples 'sticking' do - before do - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_write_location) - .and_return('foo') - end - - it 'sticks an entity to the primary', :aggregate_failures do - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_only?) - .and_return(false) - - ids.each do |id| - expect(sticking) - .to receive(:set_write_location_for) - .with(:user, id, 'foo') - end + let(:redis) { instance_double(::Gitlab::Redis::MultiStore) } - expect(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:use_primary!) - - subject - end + before do + allow(::Gitlab::Redis::DbLoadBalancing).to receive(:with).and_yield(redis) - it 'does not update the write location when no replicas are used' do - expect(sticking).not_to receive(:set_write_location_for) + allow(ActiveRecord::Base.load_balancer) + .to receive(:primary_write_location) + .and_return(primary_write_location) - subject - end + allow(redis).to receive(:get) + .with("database-load-balancing/write-location/#{load_balancer.name}/user/42") + .and_return(last_write_location) end - shared_examples 'tracking status in redis' do - describe '#stick_or_unstick_request' do - it 'sticks or unsticks a single object and updates the Rack environment' do - expect(sticking) - .to receive(:unstick_or_continue_sticking) - .with(:user, 42) - - env = {} - - sticking.stick_or_unstick_request(env, :user, 42) - - expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a) - .to eq([[sticking, :user, 42]]) - end - - it 'sticks or unsticks multiple objects and updates the Rack environment' do - expect(sticking) - .to receive(:unstick_or_continue_sticking) - .with(:user, 42) - .ordered - - expect(sticking) - .to receive(:unstick_or_continue_sticking) - .with(:runner, '123456789') - .ordered - - env = {} - - sticking.stick_or_unstick_request(env, :user, 42) - sticking.stick_or_unstick_request(env, :runner, '123456789') - - expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq( - [ - [sticking, :user, 42], - [sticking, :runner, - '123456789'] - ]) - end - end - - describe '#stick_if_necessary' do - it 'does not stick if no write was performed' do - allow(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:performed_write?) 
- .and_return(false) - - expect(sticking).not_to receive(:stick) - - sticking.stick_if_necessary(:user, 42) - end - - it 'sticks to the primary if a write was performed' do - allow(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:performed_write?) - .and_return(true) - - expect(sticking) - .to receive(:stick) - .with(:user, 42) + after do + Gitlab::Database::LoadBalancing::Session.clear_session + end - sticking.stick_if_necessary(:user, 42) - end + describe '#find_caught_up_replica' do + before do + allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original end - describe '#all_caught_up?' do - let(:lb) { ActiveRecord::Base.load_balancer } - let(:last_write_location) { 'foo' } - - before do - allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original - - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return(last_write_location) - end - - context 'when no write location could be found' do - let(:last_write_location) { nil } - - it 'returns true' do - expect(lb).not_to receive(:select_up_to_date_host) - - expect(sticking.all_caught_up?(:user, 42)).to eq(true) - end - end - - context 'when all secondaries have caught up' do - before do - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true) - end - - it 'returns true, and unsticks' do - expect(sticking) - .to receive(:unstick) - .with(:user, 42) - - expect(sticking.all_caught_up?(:user, 42)).to eq(true) - end + context 'when no write location could be found' do + let(:last_write_location) { nil } - it 'notifies with the proper event payload' do - expect(ActiveSupport::Notifications) - .to receive(:instrument) - .with('caught_up_replica_pick.load_balancing', { result: true }) - .and_call_original + it 'returns true' do + expect(load_balancer).not_to receive(:select_up_to_date_host) - sticking.all_caught_up?(:user, 42) - end + expect(sticking.find_caught_up_replica(:user, 42)).to eq(true) end - context 'when the secondaries have not yet caught up' do - before do - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false) - end - - it 'returns false' do - expect(sticking.all_caught_up?(:user, 42)).to eq(false) - end + context 'when use_primary_on_empty_location is true' do + it 'returns false, does not unstick and calls use_primary!' do + expect(load_balancer).not_to receive(:select_up_to_date_host) - it 'notifies with the proper event payload' do - expect(ActiveSupport::Notifications) - .to receive(:instrument) - .with('caught_up_replica_pick.load_balancing', { result: false }) - .and_call_original + expect(redis).not_to receive(:del) + expect(::Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary!) 
- sticking.all_caught_up?(:user, 42) + expect(sticking.find_caught_up_replica(:user, 42, use_primary_on_empty_location: true)).to eq(false) end end end - describe '#unstick_or_continue_sticking' do - let(:lb) { ActiveRecord::Base.load_balancer } - - it 'simply returns if no write location could be found' do - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return(nil) - - expect(lb).not_to receive(:select_up_to_date_host) - - sticking.unstick_or_continue_sticking(:user, 42) - end - - it 'unsticks if all secondaries have caught up' do - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return('foo') + context 'when all replicas have caught up' do + it 'returns true and unsticks' do + expect(load_balancer).to receive(:select_up_to_date_host).with(last_write_location) + .and_return(::Gitlab::Database::LoadBalancing::LoadBalancer::ALL_CAUGHT_UP) - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true) + expect(redis) + .to receive(:del) + .with("database-load-balancing/write-location/#{load_balancer.name}/user/42") - expect(sticking) - .to receive(:unstick) - .with(:user, 42) - - sticking.unstick_or_continue_sticking(:user, 42) + expect(sticking.find_caught_up_replica(:user, 42)).to eq(true) end + end - it 'continues using the primary if the secondaries have not yet caught up' do - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return('foo') - - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false) - - expect(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:use_primary!) + context 'when only some of the replicas have caught up' do + it 'returns true and does not unstick' do + expect(load_balancer).to receive(:select_up_to_date_host).with(last_write_location) + .and_return(::Gitlab::Database::LoadBalancing::LoadBalancer::ANY_CAUGHT_UP) - sticking.unstick_or_continue_sticking(:user, 42) - end - end + expect(redis).not_to receive(:del) - describe '#stick' do - it_behaves_like 'sticking' do - let(:ids) { [42] } - subject { sticking.stick(:user, ids.first) } + expect(sticking.find_caught_up_replica(:user, 42)).to eq(true) end end - describe '#bulk_stick' do - it_behaves_like 'sticking' do - let(:ids) { [42, 43] } - subject { sticking.bulk_stick(:user, ids) } + context 'when none of the replicas have caught up' do + before do + allow(load_balancer).to receive(:select_up_to_date_host).with(last_write_location) + .and_return(::Gitlab::Database::LoadBalancing::LoadBalancer::NONE_CAUGHT_UP) end - end - - describe '#mark_primary_write_location' do - it 'updates the write location with the load balancer' do - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_write_location) - .and_return('foo') - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_only?) - .and_return(false) + it 'returns false, does not unstick and calls use_primary!' do + expect(redis).not_to receive(:del) + expect(::Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary!) - expect(sticking) - .to receive(:set_write_location_for) - .with(:user, 42, 'foo') - - sticking.mark_primary_write_location(:user, 42) + expect(sticking.find_caught_up_replica(:user, 42)).to eq(false) end - it 'does nothing when no replicas are used' do - expect(sticking).not_to receive(:set_write_location_for) + context 'when use_primary_on_failure is false' do + it 'does not call use_primary!' 
do + expect(redis).not_to receive(:del) + expect(::Gitlab::Database::LoadBalancing::Session.current).not_to receive(:use_primary!) - sticking.mark_primary_write_location(:user, 42) + expect(sticking.find_caught_up_replica(:user, 42, use_primary_on_failure: false)).to eq(false) + end end end + end - describe '#unstick' do - it 'removes the sticking data from Redis' do - sticking.set_write_location_for(:user, 4, 'foo') - sticking.unstick(:user, 4) + shared_examples 'sticking' do + it 'sticks an entity to the primary', :aggregate_failures do + allow(ActiveRecord::Base.load_balancer) + .to receive(:primary_only?) + .and_return(false) - expect(sticking.last_write_location_for(:user, 4)).to be_nil + ids.each do |id| + expect(redis) + .to receive(:set) + .with("database-load-balancing/write-location/#{load_balancer.name}/user/#{id}", 'the-primary-lsn', ex: 30) end - end - describe '#last_write_location_for' do - it 'returns the last WAL write location for a user' do - sticking.set_write_location_for(:user, 4, 'foo') + expect(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:use_primary!) - expect(sticking.last_write_location_for(:user, 4)).to eq('foo') - end + subject end - describe '#select_caught_up_replicas' do - let(:lb) { ActiveRecord::Base.load_balancer } - - context 'with no write location' do - before do - allow(sticking) - .to receive(:last_write_location_for) - .with(:project, 42) - .and_return(nil) - end - - it 'returns false and does not try to find caught up hosts' do - expect(lb).not_to receive(:select_up_to_date_host) - expect(sticking.select_caught_up_replicas(:project, 42)).to be false - end - end - - context 'with write location' do - before do - allow(sticking) - .to receive(:last_write_location_for) - .with(:project, 42) - .and_return('foo') - end + it 'does not update the write location when no replicas are used' do + expect(sticking).not_to receive(:set_write_location_for) - it 'returns true, selects hosts, and unsticks if any secondary has caught up' do - expect(lb).to receive(:select_up_to_date_host).and_return(true) - expect(sticking) - .to receive(:unstick) - .with(:project, 42) - expect(sticking.select_caught_up_replicas(:project, 42)).to be true - end - end + subject end end - context 'with multi-store feature flags turned on' do - it_behaves_like 'tracking status in redis' - end - - context 'when both multi-store feature flags are off' do - before do - stub_feature_flags(use_primary_and_secondary_stores_for_db_load_balancing: false) - stub_feature_flags(use_primary_store_as_default_for_db_load_balancing: false) + describe '#stick' do + it_behaves_like 'sticking' do + let(:ids) { [42] } + subject { sticking.stick(:user, ids.first) } end - - it_behaves_like 'tracking status in redis' end - describe '#redis_key_for' do - it 'returns a String' do - expect(sticking.redis_key_for(:user, 42)) - .to eq('database-load-balancing/write-location/main/user/42') + describe '#bulk_stick' do + it_behaves_like 'sticking' do + let(:ids) { [42, 43] } + subject { sticking.bulk_stick(:user, ids) } end end end diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb index 0b25389c667..a12e0909dc2 100644 --- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb +++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb @@ -107,6 +107,8 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do before do observe + rescue Exception # rubocop:disable Lint/RescueException + # 
ignore (we expect this exception) end it 'records a valid observation', :aggregate_failures do diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb index 6cac7abb703..2fa4c9e562f 100644 --- a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb +++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb @@ -14,14 +14,17 @@ RSpec.describe 'cross-database foreign keys' do 'gitlab_subscriptions.hosted_plan_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422012 'group_import_states.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421210 'identities.saml_provider_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422010 - 'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044 + 'issues.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154 + 'issues.closed_by_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154 + 'issues.updated_by_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154 + 'issue_assignees.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422154 'merge_requests.assignee_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080 'merge_requests.updated_by_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080 'merge_requests.merge_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080 'merge_requests.author_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422080 + 'project_authorizations.user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/422044 'projects.creator_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844 'projects.marked_for_deletion_by_user_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/421844 - 'routes.namespace_id', # https://gitlab.com/gitlab-org/gitlab/-/issues/420869 'user_group_callouts.user_id' # https://gitlab.com/gitlab-org/gitlab/-/issues/421287 ] end diff --git a/spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb b/spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb new file mode 100644 index 00000000000..ca7b6c8aa98 --- /dev/null +++ b/spec/lib/gitlab/database/no_overrides_for_through_associations_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'overridden has_many :through associations', :eager_load, feature_category: :database do + let!(:allowed_overrides) do + [ + # https://gitlab.com/gitlab-org/gitlab/-/issues/424851 + override_class.new(:assignees, 'app/models/concerns/deprecated_assignee.rb'), + # https://gitlab.com/gitlab-org/gitlab/-/issues/424852 + override_class.new(:authorized_projects, 'app/models/user.rb'), + # https://gitlab.com/gitlab-org/gitlab/-/issues/424853 + override_class.new(:project, 'app/models/incident_management/issuable_escalation_status.rb'), + # https://gitlab.com/gitlab-org/gitlab/-/issues/424854 + override_class.new(:remediations, 'ee/app/models/vulnerabilities/finding.rb') + ] + end + + let!(:override_class) do + Struct.new(:method_name, :file_path, :association_class) do + def initialize(method_name, file_path, association_class = nil) + super(method_name, file_path, association_class) + end + + def ==(other) + full_source_path, short_path = + file_path.length > other.file_path.length ? 
[file_path, other.file_path] : [other.file_path, file_path] + method_name == other.method_name && full_source_path.include?(short_path) + end + + def association_type_name + if association_class == ActiveRecord::Associations::HasOneThroughAssociation + 'has_one through:' + else + 'has_many through:' + end + end + end + end + + let!(:documentation_link) do + 'https://docs.gitlab.com/ee/development/gotchas.html#do-not-override-has_many-through-or-has_one-through-associations' + end + + it 'onlies have allowed list of overridden has_many/has_one :through associations', :aggregate_failures do + overridden_associations.each do |overriden_method| + expect(allowed_override?(overriden_method)).to be_truthy, + "Found an overridden #{overriden_method.association_type_name} association " \ + "named `#{overriden_method.method_name}`, in #{overriden_method.file_path}, which isn't allowed. " \ + "Overriding such associations can have dangerous impacts, see: #{documentation_link}" + end + end + + private + + def allowed_override?(overriden_method) + allowed_overrides.find do |override| + override == overriden_method + end + end + + def overridden_associations + ApplicationRecord.descendants.reject(&:abstract_class?).each_with_object([]) do |klass, array| + through_reflections = klass.reflect_on_all_associations.select do |assoc| + assoc.is_a?(ActiveRecord::Reflection::ThroughReflection) + end + + overridden_methods = through_reflections + .map { |association| [association.association_class, association.name] } + .map { |association_class, method_name| [method_name, source_location(klass, method_name), association_class] } + .reject { |_, source_location, _| source_location.include?('activerecord-') } + + array << override_class.new(*overridden_methods.flatten) if overridden_methods.any? 
+ end + end + + def source_location(klass, method_name) + klass.instance_method(method_name).source_location.first + end +end diff --git a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb index f415e892818..79c2c9e32d2 100644 --- a/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb +++ b/spec/lib/gitlab/database/partitioning/ci_sliding_list_strategy_spec.rb @@ -175,4 +175,30 @@ RSpec.describe Gitlab::Database::Partitioning::CiSlidingListStrategy, feature_ca end.not_to raise_error end end + + describe 'attributes' do + let(:partitioning_key) { :partition } + let(:next_partition_if) { -> { true } } + let(:detach_partition_if) { -> { false } } + let(:analyze_interval) { 1.week } + + subject(:strategy) do + described_class.new( + model, partitioning_key, + next_partition_if: next_partition_if, + detach_partition_if: detach_partition_if, + analyze_interval: analyze_interval + ) + end + + specify do + expect(strategy).to have_attributes({ + model: model, + partitioning_key: partitioning_key, + next_partition_if: next_partition_if, + detach_partition_if: detach_partition_if, + analyze_interval: analyze_interval + }) + end + end end diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb index 50115a6f3dd..3afa338fdf7 100644 --- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb +++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do +RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy, feature_category: :database do let(:connection) { ActiveRecord::Base.connection } describe '#current_partitions' do @@ -273,4 +273,32 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do end end end + + describe 'attributes' do + let(:partitioning_key) { :partition } + let(:retain_non_empty_partitions) { true } + let(:retain_for) { 12.months } + let(:analyze_interval) { 1.week } + let(:model) { class_double(ApplicationRecord, table_name: table_name, connection: connection) } + let(:table_name) { :_test_partitioned_test } + + subject(:strategy) do + described_class.new( + model, partitioning_key, + retain_for: retain_for, + retain_non_empty_partitions: retain_non_empty_partitions, + analyze_interval: analyze_interval + ) + end + + specify do + expect(strategy).to have_attributes({ + model: model, + partitioning_key: partitioning_key, + retain_for: retain_for, + retain_non_empty_partitions: retain_non_empty_partitions, + analyze_interval: analyze_interval + }) + end + end end diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb index eac4a162879..c41228777ca 100644 --- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb +++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Partitioning::PartitionManager do +RSpec.describe Gitlab::Database::Partitioning::PartitionManager, feature_category: :database do + include ActiveSupport::Testing::TimeHelpers include Database::PartitioningHelpers include ExclusiveLeaseHelpers @@ -15,7 +16,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do let(:connection) { ActiveRecord::Base.connection } let(:table) { 
partitioned_table_name } let(:partitioning_strategy) do - double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) + double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil, analyze_interval: nil) end let(:partitions) do @@ -125,7 +126,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do let(:connection) { ActiveRecord::Base.connection } let(:table) { :_test_foo } let(:partitioning_strategy) do - double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil) + double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil, analyze_interval: nil) end before do @@ -256,6 +257,154 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do end end + describe 'analyze partitioned table' do + let(:analyze) { true } + let(:analyze_table) { partitioned_table_name } + let(:analyze_partition) { "#{partitioned_table_name}_1" } + let(:analyze_regex) { /ANALYZE VERBOSE "#{analyze_table}"/ } + let(:analyze_interval) { 1.week } + let(:connection) { my_model.connection } + let(:create_partition) { true } + let(:my_model) do + interval = analyze_interval + Class.new(ApplicationRecord) do + include PartitionedTable + + partitioned_by :partition_id, + strategy: :ci_sliding_list, + next_partition_if: proc { false }, + detach_partition_if: proc { false }, + analyze_interval: interval + end + end + + shared_examples_for 'run only once analyze within interval' do + specify do + control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) } + expect(control.occurrences).to include(analyze_regex) + + control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) } + expect(control.occurrences).not_to include(analyze_regex) + + travel_to((analyze_interval * 2).since) do + control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) } + expect(control.occurrences).to include(analyze_regex) + end + end + end + + shared_examples_for 'not to run the analyze at all' do + specify do + control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) } + expect(control.occurrences).not_to include(analyze_regex) + + control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) } + expect(control.occurrences).not_to include(analyze_regex) + + travel_to((analyze_interval * 2).since) do + control = ActiveRecord::QueryRecorder.new { described_class.new(my_model, connection: connection).sync_partitions(analyze: analyze) } + expect(control.occurrences).not_to include(analyze_regex) + end + end + end + + before do + my_model.table_name = partitioned_table_name + + connection.execute(<<~SQL) + CREATE TABLE #{analyze_table}(id serial) PARTITION BY LIST (id); + SQL + + connection.execute(<<~SQL) if create_partition + CREATE TABLE IF NOT EXISTS #{analyze_partition} PARTITION OF #{analyze_table} FOR VALUES IN (1); + SQL + + allow(connection).to receive(:select_value).and_return(nil, Time.current, Time.current) + end + + context 'when feature flag database_analyze_on_partitioned_tables is enabled' do + before do + stub_feature_flags(database_analyze_on_partitioned_tables: true) + end + + it_behaves_like 'run only once analyze within 
interval' + + context 'when analyze is false' do + let(:analyze) { false } + + it_behaves_like 'not to run the analyze at all' + end + + context 'when model does not set analyze_interval' do + let(:my_model) do + Class.new(ApplicationRecord) do + include PartitionedTable + + partitioned_by :partition_id, + strategy: :ci_sliding_list, + next_partition_if: proc { false }, + detach_partition_if: proc { false } + end + end + + it_behaves_like 'not to run the analyze at all' + end + + context 'when no partition is created' do + let(:create_partition) { false } + + it_behaves_like 'run only once analyze within interval' + end + end + + context 'when feature flag database_analyze_on_partitioned_tables is disabled' do + before do + stub_feature_flags(database_analyze_on_partitioned_tables: false) + end + + it_behaves_like 'not to run the analyze at all' + + context 'when analyze is false' do + let(:analyze) { false } + + it_behaves_like 'not to run the analyze at all' + end + + context 'when model does not set analyze_interval' do + let(:my_model) do + Class.new(ApplicationRecord) do + include PartitionedTable + + partitioned_by :partition_id, + strategy: :ci_sliding_list, + next_partition_if: proc { false }, + detach_partition_if: proc { false } + end + end + + it_behaves_like 'not to run the analyze at all' + end + + context 'when no partition is created' do + let(:create_partition) { false } + + it_behaves_like 'not to run the analyze at all' + end + end + end + + describe 'strategies that support analyze_interval' do + [ + ::Gitlab::Database::Partitioning::MonthlyStrategy, + ::Gitlab::Database::Partitioning::SlidingListStrategy, + ::Gitlab::Database::Partitioning::CiSlidingListStrategy + ].each do |klass| + specify "#{klass} supports analyze_interval" do + expect(klass).to be_method_defined(:analyze_interval) + end + end + end + context 'creating and then detaching partitions for a table' do let(:connection) { ActiveRecord::Base.connection } let(:my_model) do diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb index 5b6967c2d14..ac4d345271e 100644 --- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb +++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb @@ -290,4 +290,30 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy, feature_cate expect(partition_3_model.partition).to eq(3) end end + + describe 'attributes' do + let(:partitioning_key) { :partition } + let(:next_partition_if) { -> { puts "next_partition_if" } } + let(:detach_partition_if) { -> { puts "detach_partition_if" } } + let(:analyze_interval) { 1.week } + + subject(:strategy) do + described_class.new( + model, partitioning_key, + next_partition_if: next_partition_if, + detach_partition_if: detach_partition_if, + analyze_interval: analyze_interval + ) + end + + specify do + expect(strategy).to have_attributes({ + model: model, + partitioning_key: partitioning_key, + next_partition_if: next_partition_if, + detach_partition_if: detach_partition_if, + analyze_interval: analyze_interval + }) + end + end end diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb index a1ae75ac916..e53e0cb8def 100644 --- a/spec/lib/gitlab/database/partitioning_spec.rb +++ b/spec/lib/gitlab/database/partitioning_spec.rb @@ -8,6 +8,10 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do let(:main_connection) { 
ApplicationRecord.connection } + before do + stub_feature_flags(disallow_database_ddl_feature_flags: false) + end + around do |example| previously_registered_models = described_class.registered_models.dup described_class.instance_variable_set(:@registered_models, Set.new) @@ -32,7 +36,7 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do describe '.sync_partitions_ignore_db_error' do it 'calls sync_partitions' do - expect(described_class).to receive(:sync_partitions) + expect(described_class).to receive(:sync_partitions).with(analyze: false) described_class.sync_partitions_ignore_db_error end @@ -100,6 +104,55 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do .and change { find_partitions(table_names.last).size }.from(0) end + context 'for analyze' do + let(:analyze_regex) { /ANALYZE VERBOSE / } + let(:analyze) { true } + + shared_examples_for 'not running analyze' do + specify do + control = ActiveRecord::QueryRecorder.new { described_class.sync_partitions(analyze: analyze) } + expect(control.occurrences).not_to include(analyze_regex) + end + end + + context 'when analyze_interval is not set' do + it_behaves_like 'not running analyze' + + context 'when analyze is set to false' do + it_behaves_like 'not running analyze' + end + end + + context 'when analyze_interval is set' do + let(:models) do + [ + Class.new(ApplicationRecord) do + include PartitionedTable + + self.table_name = :_test_partitioning_test1 + partitioned_by :created_at, strategy: :monthly, analyze_interval: 1.week + end, + Class.new(Gitlab::Database::Partitioning::TableWithoutModel).tap do |klass| + klass.table_name = :_test_partitioning_test2 + klass.partitioned_by(:created_at, strategy: :monthly, analyze_interval: 1.week) + klass.limit_connection_names = %i[main] + end + ] + end + + it 'runs analyze' do + control = ActiveRecord::QueryRecorder.new { described_class.sync_partitions(models, analyze: analyze) } + expect(control.occurrences).to include(analyze_regex) + end + + context 'analyze is false' do + let(:analyze) { false } + + it_behaves_like 'not running analyze' + end + end + end + context 'with multiple databases' do it 'creates partitions in each database' do skip_if_shared_database(:ci) @@ -165,11 +218,11 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do execute_on_each_database("DROP TABLE IF EXISTS #{table_name}") execute_on_each_database(<<~SQL) - CREATE TABLE #{table_name} ( - id serial not null, - created_at timestamptz not null, - PRIMARY KEY (id, created_at)) - PARTITION BY RANGE (created_at); + CREATE TABLE #{table_name} ( + id serial not null, + created_at timestamptz not null, + PRIMARY KEY (id, created_at)) + PARTITION BY RANGE (created_at); SQL end end @@ -204,6 +257,20 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do described_class.sync_partitions(models) end end + + context 'when disallow_database_ddl_feature_flags feature flag is enabled' do + before do + described_class.register_models(models) + stub_feature_flags(disallow_database_ddl_feature_flags: true) + end + + it 'skips sync_partitions' do + expect(described_class::PartitionManager).not_to receive(:new) + expect(described_class).to receive(:sync_partitions).and_call_original + + described_class.sync_partitions(models) + end + end end describe '.report_metrics' do @@ -277,6 +344,18 @@ RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do end end + context 'when the feature disallow DDL feature 
flags is enabled' do + before do + stub_feature_flags(disallow_database_ddl_feature_flags: true) + end + + it 'does not call the DetachedPartitionDropper' do + expect(Gitlab::Database::Partitioning::DetachedPartitionDropper).not_to receive(:new) + + described_class.drop_detached_partitions + end + end + def table_exists?(table_name) table_oid(table_name).present? end diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb index 851fc7ea3cd..441f6476abe 100644 --- a/spec/lib/gitlab/database/reindexing_spec.rb +++ b/spec/lib/gitlab/database/reindexing_spec.rb @@ -6,6 +6,10 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t include ExclusiveLeaseHelpers include Database::DatabaseHelpers + before do + stub_feature_flags(disallow_database_ddl_feature_flags: false) + end + describe '.invoke' do let(:databases) { Gitlab::Database.database_base_models_with_gitlab_shared } let(:databases_count) { databases.count } @@ -44,6 +48,14 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t described_class.invoke end + + it 'does not execute async index creation when disable ddl flag is enabled' do + stub_feature_flags(disallow_database_ddl_feature_flags: true) + + expect(Gitlab::Database::AsyncIndexes).not_to receive(:create_pending_indexes!) + + described_class.invoke + end end it 'executes async index destruction prior to any reindexing actions' do @@ -86,6 +98,14 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t described_class.invoke end + + it 'does not execute async index creation when disable ddl flag is enabled' do + stub_feature_flags(disallow_database_ddl_feature_flags: true) + + expect(Gitlab::Database::AsyncIndexes).not_to receive(:validate_pending_entries!) 
+ + described_class.invoke + end end end diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb index 04bec50088d..e41c7d34378 100644 --- a/spec/lib/gitlab/database/tables_truncate_spec.rb +++ b/spec/lib/gitlab/database/tables_truncate_spec.rb @@ -9,6 +9,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba let(:min_batch_size) { 1 } let(:main_connection) { ApplicationRecord.connection } let(:ci_connection) { Ci::ApplicationRecord.connection } + let(:logger) { instance_double(Logger) } # Main Database let(:main_db_main_item_model) { table("_test_gitlab_main_items", database: "main") } @@ -32,8 +33,123 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba table("gitlab_partitions_dynamic._test_gitlab_hook_logs_202201", database: "ci") end + before do + skip_if_shared_database(:ci) + + # Creating some test tables on the main database + main_tables_sql = <<~SQL + CREATE TABLE _test_gitlab_main_items (id serial NOT NULL PRIMARY KEY); + + CREATE TABLE _test_gitlab_main_references ( + id serial NOT NULL PRIMARY KEY, + item_id BIGINT NOT NULL, + CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id) + ); + + CREATE TABLE _test_gitlab_hook_logs ( + id bigserial not null, + created_at timestamptz not null, + item_id BIGINT NOT NULL, + PRIMARY KEY (id, created_at), + CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id) + ) PARTITION BY RANGE(created_at); + + CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202201 + PARTITION OF _test_gitlab_hook_logs + FOR VALUES FROM ('20220101') TO ('20220131'); + + CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202202 + PARTITION OF _test_gitlab_hook_logs + FOR VALUES FROM ('20220201') TO ('20220228'); + + ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201; + SQL + + execute_on_each_database(main_tables_sql) + + ci_tables_sql = <<~SQL + CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY); + + CREATE TABLE _test_gitlab_ci_references ( + id serial NOT NULL PRIMARY KEY, + item_id BIGINT NOT NULL, + CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_ci_items(id) + ); + SQL + + execute_on_each_database(ci_tables_sql) + + internal_tables_sql = <<~SQL + CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY); + SQL + + execute_on_each_database(internal_tables_sql) + + # Filling the tables + 5.times do |i| + # Main Database + main_db_main_item_model.create!(id: i) + main_db_main_reference_model.create!(item_id: i) + main_db_ci_item_model.create!(id: i) + main_db_ci_reference_model.create!(item_id: i) + main_db_shared_item_model.create!(id: i) + main_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00') + main_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00') + # CI Database + ci_db_main_item_model.create!(id: i) + ci_db_main_reference_model.create!(item_id: i) + ci_db_ci_item_model.create!(id: i) + ci_db_ci_reference_model.create!(item_id: i) + ci_db_shared_item_model.create!(id: i) + ci_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00') + ci_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00') + end + + Gitlab::Database::SharedModel.using_connection(main_connection) do + Postgresql::DetachedPartition.create!( + table_name: 
'_test_gitlab_hook_logs_202201', + drop_after: Time.current + ) + end + + Gitlab::Database::SharedModel.using_connection(ci_connection) do + Postgresql::DetachedPartition.create!( + table_name: '_test_gitlab_hook_logs_202201', + drop_after: Time.current + ) + end + + allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return( + { + "_test_gitlab_main_items" => :gitlab_main, + "_test_gitlab_main_references" => :gitlab_main, + "_test_gitlab_hook_logs" => :gitlab_main, + "_test_gitlab_ci_items" => :gitlab_ci, + "_test_gitlab_ci_references" => :gitlab_ci, + "_test_gitlab_shared_items" => :gitlab_shared, + "_test_gitlab_geo_items" => :gitlab_geo + } + ) + + allow(Gitlab::Database::GitlabSchema).to receive(:views_and_tables_to_schema).and_return( + { + "_test_gitlab_main_items" => :gitlab_main, + "_test_gitlab_main_references" => :gitlab_main, + "_test_gitlab_hook_logs" => :gitlab_main, + "_test_gitlab_ci_items" => :gitlab_ci, + "_test_gitlab_ci_references" => :gitlab_ci, + "_test_gitlab_shared_items" => :gitlab_shared, + "_test_gitlab_geo_items" => :gitlab_geo, + "detached_partitions" => :gitlab_shared, + "postgres_foreign_keys" => :gitlab_shared, + "postgres_partitions" => :gitlab_shared + } + ) + + allow(logger).to receive(:info).with(any_args) + end + shared_examples 'truncating legacy tables on a database' do - let(:logger) { instance_double(Logger) } let(:dry_run) { false } let(:until_table) { nil } @@ -47,122 +163,6 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba ).execute end - before do - skip_if_shared_database(:ci) - - # Creating some test tables on the main database - main_tables_sql = <<~SQL - CREATE TABLE _test_gitlab_main_items (id serial NOT NULL PRIMARY KEY); - - CREATE TABLE _test_gitlab_main_references ( - id serial NOT NULL PRIMARY KEY, - item_id BIGINT NOT NULL, - CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id) - ); - - CREATE TABLE _test_gitlab_hook_logs ( - id bigserial not null, - created_at timestamptz not null, - item_id BIGINT NOT NULL, - PRIMARY KEY (id, created_at), - CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id) - ) PARTITION BY RANGE(created_at); - - CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202201 - PARTITION OF _test_gitlab_hook_logs - FOR VALUES FROM ('20220101') TO ('20220131'); - - CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202202 - PARTITION OF _test_gitlab_hook_logs - FOR VALUES FROM ('20220201') TO ('20220228'); - - ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201; - SQL - - execute_on_each_database(main_tables_sql) - - ci_tables_sql = <<~SQL - CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY); - - CREATE TABLE _test_gitlab_ci_references ( - id serial NOT NULL PRIMARY KEY, - item_id BIGINT NOT NULL, - CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_ci_items(id) - ); - SQL - - execute_on_each_database(ci_tables_sql) - - internal_tables_sql = <<~SQL - CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY); - SQL - - execute_on_each_database(internal_tables_sql) - - # Filling the tables - 5.times do |i| - # Main Database - main_db_main_item_model.create!(id: i) - main_db_main_reference_model.create!(item_id: i) - main_db_ci_item_model.create!(id: i) - main_db_ci_reference_model.create!(item_id: i) - main_db_shared_item_model.create!(id: i) - 
main_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00') - main_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00') - # CI Database - ci_db_main_item_model.create!(id: i) - ci_db_main_reference_model.create!(item_id: i) - ci_db_ci_item_model.create!(id: i) - ci_db_ci_reference_model.create!(item_id: i) - ci_db_shared_item_model.create!(id: i) - ci_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00') - ci_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00') - end - - Gitlab::Database::SharedModel.using_connection(main_connection) do - Postgresql::DetachedPartition.create!( - table_name: '_test_gitlab_hook_logs_202201', - drop_after: Time.current - ) - end - - Gitlab::Database::SharedModel.using_connection(ci_connection) do - Postgresql::DetachedPartition.create!( - table_name: '_test_gitlab_hook_logs_202201', - drop_after: Time.current - ) - end - - allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return( - { - "_test_gitlab_main_items" => :gitlab_main, - "_test_gitlab_main_references" => :gitlab_main, - "_test_gitlab_hook_logs" => :gitlab_main, - "_test_gitlab_ci_items" => :gitlab_ci, - "_test_gitlab_ci_references" => :gitlab_ci, - "_test_gitlab_shared_items" => :gitlab_shared, - "_test_gitlab_geo_items" => :gitlab_geo - } - ) - - allow(Gitlab::Database::GitlabSchema).to receive(:views_and_tables_to_schema).and_return( - { - "_test_gitlab_main_items" => :gitlab_main, - "_test_gitlab_main_references" => :gitlab_main, - "_test_gitlab_hook_logs" => :gitlab_main, - "_test_gitlab_ci_items" => :gitlab_ci, - "_test_gitlab_ci_references" => :gitlab_ci, - "_test_gitlab_shared_items" => :gitlab_shared, - "_test_gitlab_geo_items" => :gitlab_geo, - "detached_partitions" => :gitlab_shared, - "postgres_foreign_keys" => :gitlab_shared, - "postgres_partitions" => :gitlab_shared - } - ) - - allow(logger).to receive(:info).with(any_args) - end - context 'when the truncated tables are not locked for writes' do it 'raises an error that the tables are not locked for writes' do error_message = /is not locked for writes. Run the rake task gitlab:db:lock_writes first/ @@ -348,6 +348,50 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba end end + describe '#needs_truncation?' do + let(:database_name) { 'ci' } + + subject { described_class.new(database_name: database_name).needs_truncation? } + + context 'when running in a single database mode' do + before do + skip_if_multiple_databases_are_setup(:ci) + end + + it { is_expected.to eq(false) } + end + + context 'when running in a multiple database mode' do + before do + skip_if_shared_database(:ci) + end + + context 'with main data in ci database' do + it { is_expected.to eq(true) } + end + + context 'with no main data in ci datatabase' do + before do + # Remove 'main' data in ci database + ci_connection.truncate_tables([:_test_gitlab_main_items, :_test_gitlab_main_references]) + end + + it { is_expected.to eq(false) } + + it 'supresses some QueryAnalyzers' do + expect( + Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection + ).to receive(:with_suppressed).and_call_original + expect( + Gitlab::Database::QueryAnalyzers::Ci::PartitioningRoutingAnalyzer + ).to receive(:with_suppressed).and_call_original + + subject + end + end + end + end + def geo_configured? 
!!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'geo') end diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index 0d8fa4dad6d..6dd7d29ab42 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -198,59 +198,6 @@ RSpec.describe Gitlab::Database, feature_category: :database do end end - describe '.check_postgres_version_and_print_warning' do - let(:reflect) { instance_spy(Gitlab::Database::Reflection) } - - subject { described_class.check_postgres_version_and_print_warning } - - before do - allow(Gitlab::Database::Reflection) - .to receive(:new) - .and_return(reflect) - end - - it 'prints a warning if not compliant with minimum postgres version' do - allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false) - - expect(Kernel) - .to receive(:warn) - .with(/You are using PostgreSQL/) - .exactly(described_class.database_base_models.length) - .times - - subject - end - - it 'doesnt print a warning if compliant with minimum postgres version' do - allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(true) - - expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/) - - subject - end - - it 'doesnt print a warning in Rails runner environment' do - allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false) - allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true) - - expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/) - - subject - end - - it 'ignores ActiveRecord errors' do - allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError) - - expect { subject }.not_to raise_error - end - - it 'ignores Postgres errors' do - allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error) - - expect { subject }.not_to raise_error - end - end - describe '.db_config_for_connection' do context 'when the regular connection is used' do it 'returns db_config' do diff --git a/spec/lib/gitlab/database_warnings_spec.rb b/spec/lib/gitlab/database_warnings_spec.rb new file mode 100644 index 00000000000..6658190b94c --- /dev/null +++ b/spec/lib/gitlab/database_warnings_spec.rb @@ -0,0 +1,96 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::DatabaseWarnings, feature_category: :database do + describe '.check_postgres_version_and_print_warning' do + let(:reflect) { instance_spy(Gitlab::Database::Reflection) } + + subject { described_class.check_postgres_version_and_print_warning } + + before do + allow(Gitlab::Database::Reflection) + .to receive(:new) + .and_return(reflect) + end + + it 'prints a warning if not compliant with minimum postgres version' do + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false) + + expect(Kernel) + .to receive(:warn) + .with(/You are using PostgreSQL/) + .exactly(Gitlab::Database.database_base_models.length) + .times + + subject + end + + it 'does not print a warning if compliant with minimum postgres version' do + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(true) + + expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/) + + subject + end + + it 'does not print a warning in Rails runner environment' do + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false) + allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true) + + expect(Kernel).not_to 
receive(:warn).with(/You are using PostgreSQL/) + + subject + end + + it 'ignores ActiveRecord errors' do + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError) + + expect { subject }.not_to raise_error + end + + it 'ignores Postgres errors' do + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error) + + expect { subject }.not_to raise_error + end + end + + describe '.check_single_connection_and_print_warning' do + subject { described_class.check_single_connection_and_print_warning } + + it 'prints a warning if single connection' do + allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_SINGLE_DATABASE) + + expect(Kernel).to receive(:warn).with(/Your database has a single connection/) + + subject + end + + it 'does not print a warning if single ci connection' do + allow(Gitlab::Database).to receive(:database_mode) + .and_return(Gitlab::Database::MODE_SINGLE_DATABASE_CI_CONNECTION) + + expect(Kernel).not_to receive(:warn) + + subject + end + + it 'does not print a warning if multiple connection' do + allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_MULTIPLE_DATABASES) + + expect(Kernel).not_to receive(:warn) + + subject + end + + it 'does not print a warning in Rails runner environment' do + allow(Gitlab::Database).to receive(:database_mode).and_return(Gitlab::Database::MODE_SINGLE_DATABASE) + allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true) + + expect(Kernel).not_to receive(:warn) + + subject + end + end +end diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb index e3b0e90bff9..c7b69f39951 100644 --- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb @@ -68,7 +68,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do end context 'when the issue is a Service Desk issue' do - let(:original_recipient) { User.support_bot } + let(:original_recipient) { Users::Internal.support_bot } it 'does not raise a UserNotFoundError' do expect { receiver.execute }.not_to raise_error @@ -209,7 +209,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do context 'when note is authored from external author for service desk' do before do - SentNotification.find_by(reply_key: mail_key).update!(recipient: User.support_bot) + SentNotification.find_by(reply_key: mail_key).update!(recipient: Users::Internal.support_bot) end context 'when email contains text, quoted text and quick commands' do diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb index 98522c53a47..6941ebd2e11 100644 --- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb @@ -38,7 +38,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se new_issue = Issue.last - expect(new_issue.author).to eql(User.support_bot) + expect(new_issue.author).to eql(Users::Internal.support_bot) expect(new_issue.confidential?).to be true expect(new_issue.all_references.all).to be_empty expect(new_issue.title).to eq("The message subject! 
@all") @@ -131,7 +131,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se expect(notes.count).to eq(1) expect(new_note.note).to eq("Service desk reply!\n\n`/label ~label2`") - expect(new_note.author).to eql(User.support_bot) + expect(new_note.author).to eql(Users::Internal.support_bot) end it 'does not send thank you email' do @@ -267,7 +267,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se issue = Issue.last expect(issue.description).to include('Text from service_desk2 template') expect(issue.label_ids).to include(label.id) - expect(issue.author_id).to eq(User.support_bot.id) + expect(issue.author_id).to eq(Users::Internal.support_bot.id) expect(issue.milestone).to eq(milestone) end end @@ -294,7 +294,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler, feature_category: :se note = Note.last expect(note.note).to include("WARNING: The template file unknown.md used for service desk issues is empty or could not be found.") - expect(note.author).to eq(User.support_bot) + expect(note.author).to eq(Users::Internal.support_bot) end it 'does not send warning note email' do diff --git a/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb deleted file mode 100644 index 7a09feb5b64..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::AdminVerify do - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - let(:series) { 0 } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe 'public methods' do - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to eq 'Create a custom CI runner with just a few clicks' - expect(message.tagline).to be_nil - expect(message.title).to eq 'Spin up an autoscaling runner in GitLab' - expect(message.subtitle).to eq 'Use our AWS cloudformation template to spin up your runners in just a few clicks!' 
- expect(message.body_line1).to be_empty - expect(message.body_line2).to be_empty - expect(message.cta_text).to eq 'Create a custom runner' - expect(message.logo_path).to eq 'mailers/in_product_marketing/admin_verify-0.png' - end - - describe '#progress' do - subject { message.progress } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to eq('This is email 1 of 1 in the Admin series.') } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include('This is email 1 of 1 in the Admin series', Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb deleted file mode 100644 index ab6b1cd6171..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb +++ /dev/null @@ -1,108 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - let(:series) { 0 } - let(:test_class) { Gitlab::Email::Message::InProductMarketing::Create } - - describe 'initialize' do - subject { test_class.new(group: group, user: user, series: series) } - - context 'when series does not exist' do - let(:series) { 3 } - - it 'raises error' do - expect { subject }.to raise_error(ArgumentError) - end - end - - context 'when series exists' do - let(:series) { 0 } - - it 'does not raise error' do - expect { subject }.not_to raise_error - end - end - end - - describe '#logo_path' do - subject { test_class.new(group: group, user: user, series: series).logo_path } - - it { is_expected.to eq('mailers/in_product_marketing/create-0.png') } - end - - describe '#unsubscribe' do - subject { test_class.new(group: group, user: user, series: series).unsubscribe } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to include('%tag_unsubscribe_url%') } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include(Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - - describe '#cta_link' do - subject(:cta_link) { test_class.new(group: group, user: user, series: series).cta_link } - - it 'renders link' do - expect(CGI.unescapeHTML(cta_link)).to include(Gitlab::Routing.url_helpers.group_email_campaigns_url(group, track: :create, series: series)) - end - end - - describe '#progress' do - subject { test_class.new(group: group, user: user, series: series).progress } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to include('This is email 1 of 3 in the Create series') } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include('This is email 1 of 3 in the Create series', Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - - describe '#series?' do - using RSpec::Parameterized::TableSyntax - - subject do - test_class = "Gitlab::Email::Message::InProductMarketing::#{track.to_s.classify}".constantize - test_class.new(group: group, user: user, series: series).series? 
- end - - where(:track, :result) do - :create | true - :team_short | true - :trial_short | true - :admin_verify | true - :verify | true - :trial | true - :team | true - end - - with_them do - it { is_expected.to eq result } - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb deleted file mode 100644 index d5aec280ea6..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::Create do - using RSpec::Parameterized::TableSyntax - - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe "public methods" do - where(series: [0, 1, 2]) - - with_them do - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to be_present - expect(message.tagline).to be_present - expect(message.title).to be_present - expect(message.subtitle).to be_present - expect(message.body_line1).to be_present - expect(message.body_line2).to be_present - expect(message.cta_text).to be_present - end - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb deleted file mode 100644 index 3ac2076bf35..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::TeamShort do - using RSpec::Parameterized::TableSyntax - - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - let(:series) { 0 } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe 'public methods' do - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to eq 'Team up in GitLab for greater efficiency' - expect(message.tagline).to be_nil - expect(message.title).to eq 'Turn coworkers into collaborators' - expect(message.subtitle).to eq 'Invite your team today to build better code (and processes) together' - expect(message.body_line1).to be_empty - expect(message.body_line2).to be_empty - expect(message.cta_text).to eq 'Invite your colleagues today' - expect(message.logo_path).to eq 'mailers/in_product_marketing/team-0.png' - end - - describe '#progress' do - subject { message.progress } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to include('This is email 1 of 4 in the Team series') } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include('This is email 1 of 4 in the Team series', Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb deleted file mode 100644 index 3354b2ed5cf..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb +++ /dev/null @@ -1,82 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do - using 
RSpec::Parameterized::TableSyntax - - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe "public methods" do - where(series: [0, 1]) - - with_them do - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to be_present - expect(message.tagline).to be_present - expect(message.title).to be_present - expect(message.subtitle).to be_present - expect(message.body_line1).to be_present - expect(message.body_line2).to be_present - expect(message.cta_text).to be_present - end - - describe '#progress' do - subject { message.progress } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to include("This is email #{series + 2} of 4 in the Team series") } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include("This is email #{series + 2} of 4 in the Team series", Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - end - - context 'with series 2' do - let(:series) { 2 } - - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to be_present - expect(message.tagline).to be_nil - expect(message.title).to be_present - expect(message.subtitle).to be_present - expect(message.body_line1).to be_present - expect(message.body_line2).to be_present - expect(message.cta_text).to be_present - end - - describe '#progress' do - subject { message.progress } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to include('This is email 4 of 4 in the Team series') } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include('This is email 4 of 4 in the Team series', Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb deleted file mode 100644 index cf0a119ea80..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::TrialShort do - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - let(:series) { 0 } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe 'public methods' do - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to eq 'Be a DevOps hero' - expect(message.tagline).to be_nil - expect(message.title).to eq 'Expand your DevOps journey with a free GitLab trial' - expect(message.subtitle).to eq 'Start your trial today to experience single application success and discover all the features of GitLab Ultimate for free!' 
- expect(message.body_line1).to be_empty - expect(message.body_line2).to be_empty - expect(message.cta_text).to eq 'Start a trial' - expect(message.logo_path).to eq 'mailers/in_product_marketing/trial-0.png' - end - - describe '#progress' do - subject { message.progress } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to eq('This is email 1 of 4 in the Trial series.') } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include('This is email 1 of 4 in the Trial series', Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb deleted file mode 100644 index 7f86c9a6c6f..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb +++ /dev/null @@ -1,48 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::Trial do - using RSpec::Parameterized::TableSyntax - - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe "public methods" do - where(series: [0, 1, 2]) - - with_them do - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to be_present - expect(message.tagline).to be_present - expect(message.title).to be_present - expect(message.subtitle).to be_present - expect(message.body_line1).to be_present - expect(message.body_line2).to be_present - expect(message.cta_text).to be_present - end - - describe '#progress' do - subject { message.progress } - - before do - allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - end - - context 'on gitlab.com' do - let(:is_gitlab_com) { true } - - it { is_expected.to eq("This is email #{series + 2} of 4 in the Trial series.") } - end - - context 'not on gitlab.com' do - let(:is_gitlab_com) { false } - - it { is_expected.to include("This is email #{series + 2} of 4 in the Trial series", Gitlab::Routing.url_helpers.profile_notifications_url) } - end - end - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb deleted file mode 100644 index 7e6f62289d2..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb +++ /dev/null @@ -1,54 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing::Verify do - let_it_be(:group) { build(:group) } - let_it_be(:user) { build(:user) } - - subject(:message) { described_class.new(group: group, user: user, series: series) } - - describe "public methods" do - context 'with series 0' do - let(:series) { 0 } - - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to be_present - expect(message.tagline).to be_present - expect(message.title).to be_present - expect(message.subtitle).to be_present - expect(message.body_line1).to be_present - expect(message.body_line2).to be_nil - expect(message.cta_text).to be_present - end - end - - context 'with series 1' do - let(:series) { 1 } - - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to be_present - expect(message.tagline).to be_present - 
expect(message.title).to be_present - expect(message.subtitle).to be_present - expect(message.body_line1).to be_present - expect(message.body_line2).to be_present - expect(message.cta_text).to be_present - end - end - - context 'with series 2' do - let(:series) { 2 } - - it 'returns value for series', :aggregate_failures do - expect(message.subject_line).to be_present - expect(message.tagline).to be_present - expect(message.title).to be_present - expect(message.subtitle).to be_present - expect(message.body_line1).to be_present - expect(message.body_line2).to be_nil - expect(message.cta_text).to be_present - end - end - end -end diff --git a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb deleted file mode 100644 index 1c59d9c8208..00000000000 --- a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Email::Message::InProductMarketing do - describe '.for' do - using RSpec::Parameterized::TableSyntax - - subject { described_class.for(track) } - - context 'when track exists' do - where(:track, :expected_class) do - :create | described_class::Create - :team_short | described_class::TeamShort - :trial_short | described_class::TrialShort - :admin_verify | described_class::AdminVerify - :verify | described_class::Verify - :trial | described_class::Trial - :team | described_class::Team - end - - with_them do - it { is_expected.to eq(expected_class) } - end - end - - context 'when track does not exist' do - let(:track) { :non_existent } - - it 'raises error' do - expect { subject }.to raise_error(described_class::UnknownTrackError) - end - end - end -end diff --git a/spec/lib/gitlab/email/service_desk/custom_email_spec.rb b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb new file mode 100644 index 00000000000..bba1ca1c8be --- /dev/null +++ b/spec/lib/gitlab/email/service_desk/custom_email_spec.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Email::ServiceDesk::CustomEmail, feature_category: :service_desk do + let(:reply_key) { 'b7721fc7e8419911a8bea145236a0519' } + let(:custom_email) { 'support@example.com' } + let(:email_with_reply_key) { 'support+b7721fc7e8419911a8bea145236a0519@example.com' } + + describe '.reply_address' do + let_it_be(:project) { create(:project) } + + subject(:reply_address) { described_class.reply_address(nil, nil) } + + it { is_expected.to be nil } + + context 'with reply key' do + subject(:reply_address) { described_class.reply_address(nil, reply_key) } + + it { is_expected.to be nil } + + context 'with issue' do + let_it_be(:issue) { create(:issue, project: project) } + + subject(:reply_address) { described_class.reply_address(issue, reply_key) } + + it { is_expected.to be nil } + + context 'with service_desk_setting and custom email' do + let!(:service_desk_setting) { create(:service_desk_setting, custom_email: custom_email, project: project) } + + it { is_expected.to eq(email_with_reply_key) } + end + end + end + end +end diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb index d25511843ff..8df4a9fa84a 100644 --- a/spec/lib/gitlab/etag_caching/middleware_spec.rb +++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb @@ -7,8 +7,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state let(:middleware) { described_class.new(app) } 
let(:app_status_code) { 200 } let(:if_none_match) { nil } - let(:enabled_path) { '/gitlab-org/gitlab-foss/noteable/issue/1/notes' } - let(:endpoint) { 'issue_notes' } + let(:enabled_path) { '/gitlab-org/gitlab-foss/commit/aaaaaaaa/pipelines.json' } + let(:endpoint) { 'commit_pipelines' } describe '.skip!' do it 'sets the skip header on the response' do @@ -124,12 +124,12 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state method: 'GET', path: enabled_path, status: status_code, - request_urgency: :medium, - target_duration_s: 0.5, + request_urgency: :low, + target_duration_s: 5, metadata: a_hash_including( { - 'meta.caller_id' => 'Projects::NotesController#index', - 'meta.feature_category' => 'team_planning' + 'meta.caller_id' => 'Projects::CommitController#pipelines', + 'meta.feature_category' => 'source_code_management' } ) } @@ -185,8 +185,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state it "pushes expected information in to the context" do expect(Gitlab::ApplicationContext).to receive(:push).with( - feature_category: 'team_planning', - caller_id: 'Projects::NotesController#index', + feature_category: 'source_code_management', + caller_id: 'Projects::CommitController#pipelines', remote_ip: '127.0.0.1' ) diff --git a/spec/lib/gitlab/etag_caching/router/rails_spec.rb b/spec/lib/gitlab/etag_caching/router/rails_spec.rb index 251f634aac1..de260f43dfb 100644 --- a/spec/lib/gitlab/etag_caching/router/rails_spec.rb +++ b/spec/lib/gitlab/etag_caching/router/rails_spec.rb @@ -3,20 +3,6 @@ require 'spec_helper' RSpec.describe Gitlab::EtagCaching::Router::Rails do - it 'matches issue notes endpoint' do - result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/issue/1/notes') - - expect(result).to be_present - expect(result.name).to eq 'issue_notes' - end - - it 'matches MR notes endpoint' do - result = match_route('/my-group/and-subgroup/here-comes-the-project/noteable/merge_request/1/notes') - - expect(result).to be_present - expect(result.name).to eq 'merge_request_notes' - end - it 'matches issue title endpoint' do result = match_route('/my-group/my-project/-/issues/123/realtime_changes') diff --git a/spec/lib/gitlab/etag_caching/store_spec.rb b/spec/lib/gitlab/etag_caching/store_spec.rb index 6188a3fc8b3..117480f2a99 100644 --- a/spec/lib/gitlab/etag_caching/store_spec.rb +++ b/spec/lib/gitlab/etag_caching/store_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do +RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_cache do let(:store) { described_class.new } describe '#get' do diff --git a/spec/lib/gitlab/event_store/store_spec.rb b/spec/lib/gitlab/event_store/store_spec.rb index bbdfecc897a..04d0706c130 100644 --- a/spec/lib/gitlab/event_store/store_spec.rb +++ b/spec/lib/gitlab/event_store/store_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::EventStore::Store do +RSpec.describe Gitlab::EventStore::Store, feature_category: :shared do let(:event_klass) { stub_const('TestEvent', Class.new(Gitlab::EventStore::Event)) } let(:event) { event_klass.new(data: data) } let(:another_event_klass) { stub_const('TestAnotherEvent', Class.new(Gitlab::EventStore::Event)) } @@ -222,8 +222,6 @@ RSpec.describe Gitlab::EventStore::Store do end end - let(:event) { event_klass.new(data: data) } - it 'dispatches the event to the workers satisfying the condition' do expect(worker).to 
receive(:perform_async).with('TestEvent', serialized_data) expect(another_worker).not_to receive(:perform_async) @@ -232,6 +230,20 @@ RSpec.describe Gitlab::EventStore::Store do end end + context 'when subscription has delayed dispatching of event' do + let(:store) do + described_class.new do |s| + s.subscribe worker, to: event_klass, delay: 1.minute + end + end + + it 'dispatches the event to the worker after some time' do + expect(worker).to receive(:perform_in).with(1.minute, 'TestEvent', serialized_data) + + store.publish(event) + end + end + context 'when the event does not have any subscribers' do let(:store) do described_class.new do |s| @@ -239,8 +251,6 @@ RSpec.describe Gitlab::EventStore::Store do end end - let(:event) { event_klass.new(data: data) } - it 'returns successfully' do expect { store.publish(event) }.not_to raise_error end diff --git a/spec/lib/gitlab/experiment/rollout/feature_spec.rb b/spec/lib/gitlab/experiment/rollout/feature_spec.rb index a66f4fea207..cd46e7b3386 100644 --- a/spec/lib/gitlab/experiment/rollout/feature_spec.rb +++ b/spec/lib/gitlab/experiment/rollout/feature_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Experiment::Rollout::Feature, :experiment do - subject { described_class.new.for(subject_experiment) } + subject { described_class.new(subject_experiment) } let(:subject_experiment) { experiment('namespaced/stub') } diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb index d21ac36bf34..77361b09857 100644 --- a/spec/lib/gitlab/git/blame_spec.rb +++ b/spec/lib/gitlab/git/blame_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" -RSpec.describe Gitlab::Git::Blame do +RSpec.describe Gitlab::Git::Blame, feature_category: :source_code_management do let(:project) { create(:project, :repository) } let(:repository) { project.repository.raw } let(:sha) { TestEnv::BRANCH_SHA['master'] } @@ -38,6 +38,14 @@ RSpec.describe Gitlab::Git::Blame do expect(result.size).to eq(range.size) expect(result.map { |r| r[:line] }).to eq(['', 'This guide details how contribute to GitLab.', '']) end + + context 'when range is outside of the file content range' do + let(:range) { 9999..10000 } + + it 'returns an empty array' do + expect(result).to eq([]) + end + end end context "ISO-8859 encoding" do diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb index 6745c700b92..4d78e194da8 100644 --- a/spec/lib/gitlab/git/diff_spec.rb +++ b/spec/lib/gitlab/git/diff_spec.rb @@ -131,6 +131,31 @@ EOT expect(diff.diff).to be_utf8 end end + + context 'using a diff that is too large but collecting all paths' do + let(:gitaly_diff) do + Gitlab::GitalyClient::Diff.new( + from_path: '.gitmodules', + to_path: '.gitmodules', + old_mode: 0100644, + new_mode: 0100644, + from_id: '0792c58905eff3432b721f8c4a64363d8e28d9ae', + to_id: 'efd587ccb47caf5f31fc954edb21f0a713d9ecc3', + overflow_marker: true, + collapsed: false, + too_large: false, + patch: '' + ) + end + + let(:diff) { described_class.new(gitaly_diff) } + + it 'is already pruned and collapsed but not too large' do + expect(diff.diff).to be_empty + expect(diff).not_to be_too_large + expect(diff).to be_collapsed + end + end end context 'using a Gitaly::CommitDelta' do diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index e27b97ea0e6..18a090a00be 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -589,6 +589,37 @@ RSpec.describe Gitlab::Git::Repository, feature_category:
:source_code_managemen end end + describe '#update_refs' do + let(:repository) { mutable_repository } + let(:sha) { TestEnv::BRANCH_SHA['master'] } + let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" } + + before do + repository.write_ref(tmp_ref, sha) + end + + it 'updates the ref' do + expect do + repository.update_refs( + [ + { + old_sha: sha, + new_sha: Gitlab::Git::BLANK_SHA, + reference: tmp_ref + } + ] + ) + end.to change { repository.ref_exists?(tmp_ref) } + .from(true).to(false) + end + + it 'does not call gitaly when no refs given' do + expect_any_instance_of(Gitlab::GitalyClient::RefService).not_to receive(:update_refs) + + repository.update_refs([]) + end + end + describe '#delete_refs' do let(:repository) { mutable_repository } diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb index 9ba021e838e..7916481a853 100644 --- a/spec/lib/gitlab/git_access_snippet_spec.rb +++ b/spec/lib/gitlab/git_access_snippet_spec.rb @@ -13,7 +13,7 @@ RSpec.describe Gitlab::GitAccessSnippet do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :public) } let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) } - let_it_be(:migration_bot) { User.migration_bot } + let_it_be(:migration_bot) { Users::Internal.migration_bot } let(:repository) { snippet.repository } let(:actor) { user } diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb index 9055b284119..bd0341d51bf 100644 --- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb @@ -567,20 +567,58 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source end end - describe '#user_cherry_pick' do + describe '#user_cherry_pick', :freeze_time do let(:response_class) { Gitaly::UserCherryPickResponse } + let(:sha) { '54cec5282aa9f21856362fe321c800c236a61615' } + let(:branch_name) { 'master' } + let(:cherry_pick_message) { 'Cherry-pick message' } + let(:time) { Time.now.utc } + + let(:branch_update) do + Gitaly::OperationBranchUpdate.new( + commit_id: sha, + repo_created: false, + branch_created: false + ) + end + + let(:request) do + Gitaly::UserCherryPickRequest.new( + repository: repository.gitaly_repository, + user: gitaly_user, + commit: repository.commit.to_gitaly_commit, + branch_name: branch_name, + start_branch_name: branch_name, + start_repository: repository.gitaly_repository, + message: cherry_pick_message, + timestamp: Google::Protobuf::Timestamp.new(seconds: time.to_i) + ) + end + + let(:response) { Gitaly::UserCherryPickResponse.new(branch_update: branch_update) } subject do client.user_cherry_pick( user: user, commit: repository.commit, - branch_name: 'master', - message: 'Cherry-pick message', - start_branch_name: 'master', + branch_name: branch_name, + message: cherry_pick_message, + start_branch_name: branch_name, start_repository: repository ) end + it 'sends a user_cherry_pick message and returns a BranchUpdate' do + expect_any_instance_of(Gitaly::OperationService::Stub) + .to receive(:user_cherry_pick).with(request, kind_of(Hash)) + .and_return(response) + + expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate) + expect(subject.newrev).to be_present + expect(subject.repo_created).to be(false) + expect(subject.branch_created).to be(false) + end + context 'when AccessCheckError is raised' do let(:raised_error) do new_detailed_error( @@ -641,27 +679,68 @@ 
RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source end end - describe '#user_revert' do - let(:response_class) { Gitaly::UserRevertResponse } + describe '#user_revert', :freeze_time do + let(:sha) { '54cec5282aa9f21856362fe321c800c236a61615' } + let(:branch_name) { 'master' } + let(:revert_message) { 'revert message' } + let(:time) { Time.now.utc } + + let(:branch_update) do + Gitaly::OperationBranchUpdate.new( + commit_id: sha, + repo_created: false, + branch_created: false + ) + end + + let(:request) do + Gitaly::UserRevertRequest.new( + repository: repository.gitaly_repository, + user: gitaly_user, + commit: repository.commit.to_gitaly_commit, + branch_name: branch_name, + start_branch_name: branch_name, + start_repository: repository.gitaly_repository, + message: revert_message, + timestamp: Google::Protobuf::Timestamp.new(seconds: time.to_i) + ) + end + + let(:response) { Gitaly::UserRevertResponse.new(branch_update: branch_update) } subject do client.user_revert( user: user, commit: repository.commit, - branch_name: 'master', - message: 'Revert message', - start_branch_name: 'master', + branch_name: branch_name, + message: revert_message, + start_branch_name: branch_name, start_repository: repository ) end - before do + it 'sends a user_revert message and returns a BranchUpdate' do expect_any_instance_of(Gitaly::OperationService::Stub) - .to receive(:user_revert).with(kind_of(Gitaly::UserRevertRequest), kind_of(Hash)) - .and_return(response) + .to receive(:user_revert).with(request, kind_of(Hash)) + .and_return(response) + + expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate) + expect(subject.newrev).to be_present + expect(subject.repo_created).to be(false) + expect(subject.branch_created).to be(false) end - it_behaves_like 'cherry pick and revert errors' + context 'when errors are raised' do + let(:response_class) { Gitaly::UserRevertResponse } + + before do + expect_any_instance_of(Gitaly::OperationService::Stub) + .to receive(:user_revert).with(kind_of(Gitaly::UserRevertRequest), kind_of(Hash)) + .and_return(response) + end + + it_behaves_like 'cherry pick and revert errors' + end end describe '#rebase' do diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb index fe04ad36e9a..ae9276cf90b 100644 --- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb @@ -314,6 +314,116 @@ RSpec.describe Gitlab::GitalyClient::RefService, feature_category: :gitaly do end end + describe '#update_refs' do + let(:old_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } + let(:new_sha) { Gitlab::Git::EMPTY_TREE_ID } + let(:reference) { 'refs/does/not/exist' } + let(:expected_param) do + Gitaly::UpdateReferencesRequest::Update.new( + old_object_id: old_sha, + new_object_id: new_sha, + reference: reference + ) + end + + let(:ref_list) do + [ + { + old_sha: old_sha, + new_sha: new_sha, + reference: reference + } + ] + end + + subject(:update_refs) { client.update_refs(ref_list: ref_list) } + + it 'sends an update_refs message' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:update_references) + .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash)) + .and_return(double('update_refs_response', git_error: "")) + + update_refs + end + + context 'with a generic BadStatus error' do + let(:generic_error) do + GRPC::BadStatus.new( + GRPC::Core::StatusCodes::FAILED_PRECONDITION, + "error message"
+ ) + end + + it 'raises the BadStatus error' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:update_references) + .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash)) + .and_raise(generic_error) + + expect { update_refs }.to raise_error(GRPC::BadStatus) + end + end + + context 'with a reference state mismatch error' do + let(:reference_state_mismatch_error) do + new_detailed_error( + GRPC::Core::StatusCodes::FAILED_PRECONDITION, + "error message", + Gitaly::UpdateReferencesError.new(reference_state_mismatch: Gitaly::ReferenceStateMismatchError.new)) + end + + it 'raises ReferenceStateMismatchError' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:update_references) + .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash)) + .and_raise(reference_state_mismatch_error) + + expect { update_refs }.to raise_error(Gitlab::Git::ReferenceStateMismatchError) + end + end + + context 'with a references locked error' do + let(:references_locked_error) do + new_detailed_error( + GRPC::Core::StatusCodes::FAILED_PRECONDITION, + "error message", + Gitaly::UpdateReferencesError.new(references_locked: Gitaly::ReferencesLockedError.new)) + end + + it 'raises ReferencesLockedError' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:update_references) + .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash)) + .and_raise(references_locked_error) + + expect { update_refs }.to raise_error(Gitlab::Git::ReferencesLockedError) + end + end + + context 'with an invalid format error' do + let(:invalid_refs) { ['\invali.\d/1', '\.invali/d/2'] } + let(:invalid_reference_format_error) do + new_detailed_error( + GRPC::Core::StatusCodes::INVALID_ARGUMENT, + "error message", + Gitaly::UpdateReferencesError.new(invalid_format: Gitaly::InvalidRefFormatError.new(refs: invalid_refs))) + end + + it 'raises InvalidRefFormatError' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:update_references) + .with(array_including(gitaly_request_with_params(updates: [expected_param])), kind_of(Hash)) + .and_raise(invalid_reference_format_error) + + expect { update_refs }.to raise_error do |error| + expect(error).to be_a(Gitlab::Git::InvalidRefFormatError) + expect(error.message).to eq("references have an invalid format: #{invalid_refs.join(",")}") + end + end + end + end + describe '#delete_refs' do let(:prefixes) { %w(refs/heads refs/keep-around) } diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb index d8ae7d70bb2..8e0e4525729 100644 --- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb @@ -319,19 +319,8 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService, feature_category: :gital end end - describe '#create_from_snapshot' do - it 'sends a create_repository_from_snapshot message' do - expect_any_instance_of(Gitaly::RepositoryService::Stub) - .to receive(:create_repository_from_snapshot) - .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) - .and_return(double) - - client.create_from_snapshot('http://example.com?wiki=1', 'Custom xyz') - end - end - describe '#raw_changes_between' do - it 'sends a create_repository_from_snapshot message' do + it 'sends a get_raw_changes message' do expect_any_instance_of(Gitaly::RepositoryService::Stub) .to receive(:get_raw_changes)
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) diff --git a/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb b/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb index 42153a9a3d8..49b4f90cdf9 100644 --- a/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb +++ b/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb @@ -10,6 +10,8 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do end.new end + let_it_be(:group) { create(:group) } + describe '#user_actor' do context 'when user is not available in ApplicationContext' do it 'returns nil' do @@ -40,7 +42,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do describe '#repository, #project_actor, #group_actor' do context 'when normal project repository' do - let_it_be(:project) { create(:project, group: create(:group)) } + let_it_be(:project) { create(:project, group: group) } let(:expected_project) { project } let(:expected_group) { Feature::Gitaly::ActorWrapper.new(::Group, project.group.id) } @@ -58,7 +60,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do end context 'when project wiki repository' do - let_it_be(:project) { create(:project, :wiki_repo, group: create(:group)) } + let_it_be(:project) { create(:project, :wiki_repo, group: group) } let(:expected_project) { nil } let(:expected_group) { nil } @@ -112,7 +114,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do end context 'when project snippet' do - let_it_be(:project) { create(:project, group: create(:group)) } + let_it_be(:project) { create(:project, group: group) } let(:snippet) { create(:project_snippet, project: project) } let(:expected_project) { nil } let(:expected_group) { nil } @@ -131,23 +133,20 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do end context 'when project design' do - let_it_be(:design_repo) do - create(:design_management_repository, project: create(:project, group: create(:group))) - end - - let(:expected_project) { design_repo.project } - let(:expected_group) { design_repo.project.group } + let_it_be(:project) { create(:project_with_design, group: group) } + let(:expected_project) { project } + let(:expected_group) { group } it_behaves_like 'Gitaly feature flag actors are inferred from repository' do - let(:repository) { design_repo.repository } + let(:repository) { project.design_repository } end it_behaves_like 'Gitaly feature flag actors are inferred from repository' do - let(:repository) { design_repo.repository.raw } + let(:repository) { project.design_repository.raw } end it_behaves_like 'Gitaly feature flag actors are inferred from repository' do - let(:repository) { raw_repo_without_container(design_repo.repository) } + let(:repository) { raw_repo_without_container(project.design_repository) } end end end diff --git a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb index 086aa4be17e..72d8a9c0403 100644 --- a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb +++ b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb @@ -93,6 +93,57 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i expect(File.basename(file)).to eq('av.png') end end + + context 'when attachment is behind a redirect' do + let_it_be(:file_url) { "https://github.com/test/project/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11" } + let(:redirect_url) { 
"https://https://github-production-user-asset-6210df.s3.amazonaws.com/142635249/740edb05293e.jpg" } + let(:sample_response) do + instance_double(HTTParty::Response, redirection?: true, headers: { location: redirect_url }) + end + + it 'gets redirection url' do + expect(Gitlab::HTTP).to receive(:perform_request) + .with(Net::HTTP::Get, file_url, { follow_redirects: false }) + .and_return sample_response + + expect(Gitlab::HTTP).to receive(:perform_request) + .with(Net::HTTP::Get, redirect_url, stream_body: true).and_yield(chunk_double) + + file = downloader.perform + + expect(File.exist?(file.path)).to eq(true) + end + + context 'when url is not a redirection' do + let(:sample_response) do + instance_double(HTTParty::Response, code: 200, redirection?: false) + end + + before do + allow(Gitlab::HTTP).to receive(:perform_request) + .with(Net::HTTP::Get, file_url, { follow_redirects: false }) + .and_return sample_response + end + + it 'raises upon unsuccessful redirection' do + expect { downloader.perform }.to raise_error("expected a redirect response, got #{sample_response.code}") + end + end + + context 'when redirection url is not supported' do + let(:redirect_url) { "https://https://github-production-user-asset-6210df.s3.amazonaws.com/142635249/740edb05293e.idk" } + + before do + allow(Gitlab::HTTP).to receive(:perform_request) + .with(Net::HTTP::Get, file_url, { follow_redirects: false }) + .and_return sample_response + end + + it 'raises UnsupportedAttachmentError on unsupported extension' do + expect { downloader.perform }.to raise_error(described_class::UnsupportedAttachmentError) + end + end + end end describe '#delete' do diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb index c9f7fd4f748..4b0d61e3188 100644 --- a/spec/lib/gitlab/github_import/client_spec.rb +++ b/spec/lib/gitlab/github_import/client_spec.rb @@ -20,11 +20,29 @@ RSpec.describe Gitlab::GithubImport::Client, feature_category: :importers do end describe '#user' do + let(:status_code) { 200 } + let(:body) { { id: 1 } } + let(:headers) { { 'Content-Type' => 'application/json' } } + + before do + stub_request(:get, 'https://api.github.com/users/foo') + .to_return(status: status_code, body: body.to_json, headers: headers) + end + + subject(:user) { client.user('foo') } + it 'returns the details for the given username' do - expect(client.octokit).to receive(:user).with('foo') expect(client).to receive(:with_rate_limit).and_yield + expect(user).to eq({ id: 1 }) + end + + context 'when a not modified response is returned' do + let(:status_code) { 304 } - client.user('foo') + it 'returns nil' do + expect(client).to receive(:with_rate_limit).and_yield + expect(user).to eq(nil) + end end end diff --git a/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb index 450ebe9a719..b9829c09cfd 100644 --- a/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb @@ -53,6 +53,19 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteAttachmentsImporter, feature_ record.reload expect(record.description).to include("[link to other project blob file](#{other_project_blob_url})") end + + context 'with new github image format' do + let(:image_url) { 'https://github.com/nickname/public-test-repo/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11' } + let(:image_tag_url) { 
'https://github.com/nickname/public-test-repo/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11' } + + it 'changes image attachment links' do + importer.execute + + record.reload + expect(record.description).to include('![image.jpeg](/uploads/') + expect(record.description).to include('tag-image { + "issue" => 0 + }, + "imported" => { + "issue" => 0 + } + } + ) + end + end + context 'when there are no cached import statistics' do context 'when project import is in progress' do it 'includes an empty object counts stats in response' do diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb index 1739425c294..a394b4eba13 100644 --- a/spec/lib/gitlab/github_import/user_finder_spec.rb +++ b/spec/lib/gitlab/github_import/user_finder_spec.rb @@ -37,11 +37,11 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat it 'returns the ID of the ghost user when the object has no user' do note = { author: nil } - expect(finder.author_id_for(note)).to eq([User.ghost.id, true]) + expect(finder.author_id_for(note)).to eq([Users::Internal.ghost.id, true]) end it 'returns the ID of the ghost user when the given object is nil' do - expect(finder.author_id_for(nil)).to eq([User.ghost.id, true]) + expect(finder.author_id_for(nil)).to eq([Users::Internal.ghost.id, true]) end end @@ -208,57 +208,254 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat describe '#email_for_github_username' do let(:email) { 'kittens@example.com' } + let(:username) { 'kittens' } + let(:user) { {} } + let(:etag) { 'etag' } + let(:cache_key) { described_class::EMAIL_FOR_USERNAME_CACHE_KEY % username } + let(:etag_cache_key) { described_class::USERNAME_ETAG_CACHE_KEY % username } + let(:email_fetched_for_project_key) do + format(described_class::EMAIL_FETCHED_FOR_PROJECT_CACHE_KEY, project: project.id, username: username) + end - context 'when an Email address is cached' do - it 'reads the Email address from the cache' do - expect(Gitlab::Cache::Import::Caching) - .to receive(:read) - .and_return(email) + subject(:email_for_github_username) { finder.email_for_github_username(username) } + + shared_examples 'returns and caches the email' do + it 'returns the email' do + expect(email_for_github_username).to eq(email) + end + + it 'caches the email and expires the etag and project check caches' do + expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, email).once + expect(Gitlab::Cache::Import::Caching).to receive(:expire).with(etag_cache_key, 0).once + expect(Gitlab::Cache::Import::Caching).to receive(:expire).with(email_fetched_for_project_key, 0).once - expect(client).not_to receive(:user) - expect(finder.email_for_github_username('kittens')).to eq(email) + email_for_github_username + email_for_github_username end end - context 'when an Email address is not cached' do - let(:user) { { email: email } } + shared_examples 'returns nil and caches a negative lookup' do + it 'returns nil' do + expect(email_for_github_username).to be_nil + end - it 'retrieves and caches the Email address when an Email address is available' do - expect(client).to receive(:user).with('kittens').and_return(user).once + it 'caches a blank email and marks the project as checked' do + expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, '').once + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything) + expect(Gitlab::Cache::Import::Caching).to 
receive(:write).with(email_fetched_for_project_key, 1).once - expect(Gitlab::Cache::Import::Caching) - .to receive(:write) - .with(an_instance_of(String), email, timeout: Gitlab::Cache::Import::Caching::TIMEOUT).and_call_original + email_for_github_username + email_for_github_username + end + end - expect(finder.email_for_github_username('kittens')).to eq(email) - expect(finder.email_for_github_username('kittens')).to eq(email) + shared_examples 'does not change caches' do + it 'does not write to any of the caches' do + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(cache_key, anything) + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything) + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(email_fetched_for_project_key, anything) + + email_for_github_username + email_for_github_username end + end - it 'shortens the timeout for Email address in cache when an Email address is private/nil from GitHub' do - user = { email: nil } - expect(client).to receive(:user).with('kittens').and_return(user).once + shared_examples 'a user resource not found on GitHub' do + before do + allow(client).to receive(:user).and_raise(::Octokit::NotFound) + end - expect(Gitlab::Cache::Import::Caching) - .to receive(:write) - .with(an_instance_of(String), '', timeout: Gitlab::Cache::Import::Caching::SHORTER_TIMEOUT) - .and_call_original + it 'returns nil' do + expect(email_for_github_username).to be_nil + end + + it 'caches a blank email' do + expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, '').once + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything) + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(email_fetched_for_project_key, anything) + + email_for_github_username + email_for_github_username + end + end + + context 'when the email is cached' do + before do + Gitlab::Cache::Import::Caching.write(cache_key, email) + end + + it 'returns the email from the cache' do + expect(email_for_github_username).to eq(email) + end + + it 'does not make a rate-limited API call' do + expect(client).not_to receive(:user).with(username, { headers: {} }) + + email_for_github_username + email_for_github_username + end + end + + context 'when the email cache is nil' do + context 'if the email has not been checked for the project' do + context 'if the cached etag is nil' do + before do + allow(client).to receive_message_chain(:octokit, :last_response, :headers).and_return({ etag: etag }) + end + + it 'makes an API call' do + expect(client).to receive(:user).with(username, { headers: {} }).and_return({ email: email }).once + + email_for_github_username + end + + context 'if the response contains an email' do + before do + allow(client).to receive(:user).and_return({ email: email }) + end + + it_behaves_like 'returns and caches the email' + end + + context 'if the response does not contain an email' do + before do + allow(client).to receive(:user).and_return({}) + end + + it 'returns nil' do + expect(email_for_github_username).to be_nil + end + + it 'caches a blank email and etag and marks the project as checked' do + expect(Gitlab::Cache::Import::Caching).to receive(:write).with(cache_key, '').once + expect(Gitlab::Cache::Import::Caching).to receive(:write).with(etag_cache_key, etag).once + expect(Gitlab::Cache::Import::Caching).to receive(:write).with(email_fetched_for_project_key, 1).once - expect(finder.email_for_github_username('kittens')).to be_nil - 
expect(finder.email_for_github_username('kittens')).to be_nil + email_for_github_username + email_for_github_username + end + end + end + + context 'if the cached etag is not nil' do + before do + Gitlab::Cache::Import::Caching.write(etag_cache_key, etag) + end + + it 'makes a non-rate-limited API call' do + expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once + + email_for_github_username + end + + context 'if the response contains an email' do + before do + allow(client).to receive(:user).and_return({ email: email }) + end + + it_behaves_like 'returns and caches the email' + end + + context 'if the response does not contain an email' do + before do + allow(client).to receive(:user).and_return({}) + end + + it_behaves_like 'returns nil and caches a negative lookup' + end + + context 'if the response is nil' do + before do + allow(client).to receive(:user).and_return(nil) + end + + it 'returns nil' do + expect(email_for_github_username).to be_nil + end + + it 'marks the project as checked' do + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(cache_key, anything) + expect(Gitlab::Cache::Import::Caching).not_to receive(:write).with(etag_cache_key, anything) + expect(Gitlab::Cache::Import::Caching).to receive(:write).with(email_fetched_for_project_key, 1).once + + email_for_github_username + email_for_github_username + end + end + end + end + + context 'if the email has been checked for the project' do + before do + Gitlab::Cache::Import::Caching.write(email_fetched_for_project_key, 1) + end + + it 'returns nil' do + expect(email_for_github_username).to be_nil + end + + it_behaves_like 'does not change caches' end - context 'when a username does not exist on GitHub' do - it 'caches github username inexistence' do - expect(client) - .to receive(:user) - .with('kittens') - .and_raise(::Octokit::NotFound) - .once + it_behaves_like 'a user resource not found on GitHub' + end + + context 'when the email cache is blank' do + before do + Gitlab::Cache::Import::Caching.write(cache_key, '') + end + + context 'if the email has not been checked for the project' do + context 'if the cached etag is not nil' do + before do + Gitlab::Cache::Import::Caching.write(etag_cache_key, etag) + end + + it 'makes a non-rate-limited API call' do + expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once + + email_for_github_username + end + + context 'if the response contains an email' do + before do + allow(client).to receive(:user).and_return({ email: email }) + end + + it_behaves_like 'returns and caches the email' + end + + context 'if the response does not contain an email' do + before do + allow(client).to receive(:user).and_return({}) + end - expect(finder.email_for_github_username('kittens')).to be_nil - expect(finder.email_for_github_username('kittens')).to be_nil + it_behaves_like 'returns nil and caches a negative lookup' + end + + context 'if the response is nil' do + before do + allow(client).to receive(:user).and_return(nil) + end + + it_behaves_like 'returns nil and caches a negative lookup' + end + + it_behaves_like 'a user resource not found on GitHub' end end + + context 'if the email has been checked for the project' do + before do + Gitlab::Cache::Import::Caching.write(email_fetched_for_project_key, 1) + end + + it 'returns nil' do + expect(email_for_github_username).to be_nil + end + + it_behaves_like 'does not change caches' + end end end diff --git a/spec/lib/gitlab/github_import_spec.rb 
b/spec/lib/gitlab/github_import_spec.rb index 898bc40ec1f..8453f002bc0 100644 --- a/spec/lib/gitlab/github_import_spec.rb +++ b/spec/lib/gitlab/github_import_spec.rb @@ -36,7 +36,7 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do end it 'returns the ID of the ghost user', :clean_gitlab_redis_cache do - expect(described_class.ghost_user_id).to eq(User.ghost.id) + expect(described_class.ghost_user_id).to eq(Users::Internal.ghost.id) end it 'caches the ghost user ID', :clean_gitlab_redis_cache do @@ -97,7 +97,7 @@ RSpec.describe Gitlab::GithubImport, feature_category: :importers do end it 'returns the ID of the ghost user', :clean_gitlab_redis_cache do - expect(described_class.ghost_user_id).to eq(User.ghost.id) + expect(described_class.ghost_user_id).to eq(Users::Internal.ghost.id) end it 'caches the ghost user ID', :clean_gitlab_redis_cache do diff --git a/spec/lib/gitlab/gl_repository/identifier_spec.rb b/spec/lib/gitlab/gl_repository/identifier_spec.rb index dbdcafea6d6..bf7a21899f0 100644 --- a/spec/lib/gitlab/gl_repository/identifier_spec.rb +++ b/spec/lib/gitlab/gl_repository/identifier_spec.rb @@ -68,12 +68,10 @@ RSpec.describe Gitlab::GlRepository::Identifier do end describe 'design' do - let(:design_repository_container) { project.design_repository.container } - it_behaves_like 'parsing gl_repository identifier' do let(:record_id) { project.id } - let(:identifier) { "design-#{design_repository_container.id}" } - let(:expected_container) { design_repository_container } + let(:identifier) { "design-#{project.find_or_create_design_management_repository.id}" } + let(:expected_container) { project.design_management_repository } let(:expected_type) { Gitlab::GlRepository::DESIGN } end end diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb index 4ff8137dbd4..807f37b96c9 100644 --- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb +++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb @@ -12,8 +12,6 @@ RSpec.describe Gitlab::GlRepository::RepoType do let(:personal_snippet_path) { "snippets/#{personal_snippet.id}" } let(:project_snippet_path) { "#{project.full_path}/snippets/#{project_snippet.id}" } - let(:expected_repository_resolver) { expected_container } - describe Gitlab::GlRepository::PROJECT do it_behaves_like 'a repo type' do let(:expected_id) { project.id } @@ -136,11 +134,10 @@ RSpec.describe Gitlab::GlRepository::RepoType do describe Gitlab::GlRepository::DESIGN do it_behaves_like 'a repo type' do let(:expected_repository) { project.design_repository } - let(:expected_container) { expected_repository.container } + let(:expected_container) { project.design_management_repository } let(:expected_id) { expected_container.id } let(:expected_identifier) { "design-#{expected_id}" } let(:expected_suffix) { '.design' } - let(:expected_repository_resolver) { project } end it 'uses the design access checker' do @@ -167,15 +164,22 @@ RSpec.describe Gitlab::GlRepository::RepoType do end describe '.project_for' do - it 'returns a project' do - expect(described_class.project_for(project.design_repository.container)).to be_instance_of(Project) + it 'returns a project when container is a design_management_repository' do + expect(described_class.project_for(project.design_management_repository)).to be_instance_of(Project) end end + end - describe '.repository_for' do - it 'returns a DesignManagement::GitRepository when a project is passed' do - expect(described_class.repository_for(project)).to 
be_instance_of(DesignManagement::GitRepository) - end + describe '.repository_for' do + subject { Gitlab::GlRepository::DESIGN } + + let(:expected_message) do + "Expected container class to be #{subject.container_class} for " \ + "repo type #{subject.name}, but found #{project.class.name} instead." + end + + it 'raises an error when container class does not match given container_class' do + expect { subject.repository_for(project) }.to raise_error(Gitlab::GlRepository::ContainerClassMismatchError, expected_message) end end end diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb index 7be01507a82..b03edcb31a6 100644 --- a/spec/lib/gitlab/gl_repository_spec.rb +++ b/spec/lib/gitlab/gl_repository_spec.rb @@ -4,9 +4,8 @@ require 'spec_helper' RSpec.describe ::Gitlab::GlRepository do describe '.parse' do - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } let_it_be(:snippet) { create(:personal_snippet) } - let(:design_repository_container) { project.design_repository.container } it 'parses a project gl_repository' do expect(described_class.parse("project-#{project.id}")).to eq([project, project, Gitlab::GlRepository::PROJECT]) @@ -21,11 +20,11 @@ RSpec.describe ::Gitlab::GlRepository do end it 'parses a design gl_repository' do - expect(described_class.parse("design-#{design_repository_container.id}")).to eq( + expect(described_class.parse("design-#{project.design_management_repository.id}")).to eq( [ - design_repository_container, - project, - Gitlab::GlRepository::DESIGN + project.design_management_repository, # container + project, # project for container + Gitlab::GlRepository::DESIGN # repo type ] ) end diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb index 1135cfc22ac..fc722402917 100644 --- a/spec/lib/gitlab/gon_helper_spec.rb +++ b/spec/lib/gitlab/gon_helper_spec.rb @@ -58,6 +58,7 @@ RSpec.describe Gitlab::GonHelper do context 'when sentry is configured' do let(:clientside_dsn) { 'https://xxx@sentry.example.com/1' } let(:environment) { 'staging' } + let(:sentry_clientside_traces_sample_rate) { 0.5 } context 'with legacy sentry configuration' do before do @@ -77,6 +78,15 @@ RSpec.describe Gitlab::GonHelper do stub_application_setting(sentry_enabled: true) stub_application_setting(sentry_clientside_dsn: clientside_dsn) stub_application_setting(sentry_environment: environment) + stub_application_setting(sentry_clientside_traces_sample_rate: sentry_clientside_traces_sample_rate) + end + + it 'sets sentry dsn and environment from config' do + expect(gon).to receive(:sentry_dsn=).with(clientside_dsn) + expect(gon).to receive(:sentry_environment=).with(environment) + expect(gon).to receive(:sentry_clientside_traces_sample_rate=).with(sentry_clientside_traces_sample_rate) + + helper.add_gon_variables end context 'when enable_new_sentry_clientside_integration is disabled' do @@ -87,19 +97,8 @@ RSpec.describe Gitlab::GonHelper do it 'does not set sentry dsn and environment from config' do expect(gon).not_to receive(:sentry_dsn=).with(clientside_dsn) expect(gon).not_to receive(:sentry_environment=).with(environment) - - helper.add_gon_variables - end - end - - context 'when enable_new_sentry_clientside_integration is enabled' do - before do - stub_feature_flags(enable_new_sentry_clientside_integration: true) - end - - it 'sets sentry dsn and environment from config' do - expect(gon).to receive(:sentry_dsn=).with(clientside_dsn) - expect(gon).to 
receive(:sentry_environment=).with(environment) + expect(gon).not_to receive(:sentry_clientside_traces_sample_rate=) + .with(sentry_clientside_traces_sample_rate) helper.add_gon_variables end @@ -169,4 +168,67 @@ RSpec.describe Gitlab::GonHelper do expect(url).to match(/no_avatar.*png$/) end end + + describe '#add_browsersdk_tracking' do + let(:gon) { double('gon').as_null_object } + let(:analytics_url) { 'https://analytics.gitlab.com' } + let(:is_gitlab_com) { true } + + before do + allow(helper).to receive(:gon).and_return(gon) + allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) + end + + context 'when environment variables are set' do + before do + stub_env('GITLAB_ANALYTICS_URL', analytics_url) + stub_env('GITLAB_ANALYTICS_ID', 'analytics-id') + end + + it 'sets the analytics_url and analytics_id' do + expect(gon).to receive(:analytics_url=).with(analytics_url) + expect(gon).to receive(:analytics_id=).with('analytics-id') + + helper.add_browsersdk_tracking + end + + context 'when Gitlab.com? is false' do + let(:is_gitlab_com) { false } + + it "doesn't set the analytics_url and analytics_id" do + expect(gon).not_to receive(:analytics_url=) + expect(gon).not_to receive(:analytics_id=) + + helper.add_browsersdk_tracking + end + end + + context 'when feature flag is false' do + before do + stub_feature_flags(browsersdk_tracking: false) + end + + it "doesn't set the analytics_url and analytics_id" do + expect(gon).not_to receive(:analytics_url=) + expect(gon).not_to receive(:analytics_id=) + + helper.add_browsersdk_tracking + end + end + end + + context 'when environment variables are not set' do + before do + stub_env('GITLAB_ANALYTICS_URL', nil) + stub_env('GITLAB_ANALYTICS_ID', nil) + end + + it "doesn't set the analytics_url and analytics_id" do + expect(gon).not_to receive(:analytics_url=) + expect(gon).not_to receive(:analytics_id=) + + helper.add_browsersdk_tracking + end + end + end end diff --git a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb index 172872fd7eb..55650b0480e 100644 --- a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb +++ b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb @@ -55,7 +55,7 @@ RSpec.describe ::Gitlab::Graphql::Deprecations::Deprecation, feature_category: : it 'raises an error' do expect { parsed_deprecation }.to raise_error(ArgumentError, - '`experiment` and `deprecated` arguments cannot be passed at the same time' + '`alpha` and `deprecated` arguments cannot be passed at the same time' ) end end diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb index 071b303d777..314759fb8a4 100644 --- a/spec/lib/gitlab/group_search_results_spec.rb +++ b/spec/lib/gitlab/group_search_results_spec.rb @@ -51,6 +51,17 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do include_examples 'search results filtered by archived', 'search_merge_requests_hide_archived_projects' end + describe 'milestones search' do + let!(:unarchived_project) { create(:project, :public, group: group) } + let!(:archived_project) { create(:project, :public, :archived, group: group) } + let!(:unarchived_result) { create(:milestone, project: unarchived_project, title: 'foo') } + let!(:archived_result) { create(:milestone, project: archived_project, title: 'foo') } + let(:query) { 'foo' } + let(:scope) { 'milestones' } + + include_examples 'search results filtered by archived', 'search_milestones_hide_archived_projects' + end 
+ describe '#projects' do let(:scope) { 'projects' } let(:query) { 'Test' } @@ -60,7 +71,7 @@ RSpec.describe Gitlab::GroupSearchResults, feature_category: :global_search do let_it_be(:unarchived_result) { create(:project, :public, group: group, name: 'Test1') } let_it_be(:archived_result) { create(:project, :archived, :public, group: group, name: 'Test2') } - it_behaves_like 'search results filtered by archived', 'search_projects_hide_archived' + it_behaves_like 'search results filtered by archived' end end diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb index 93d48379414..9d89167bf81 100644 --- a/spec/lib/gitlab/http_spec.rb +++ b/spec/lib/gitlab/http_spec.rb @@ -35,11 +35,14 @@ RSpec.describe Gitlab::HTTP do super do |response| response.instance_eval do def read_body(*) - @body.each do |fragment| + mock_stream = @body.split(' ') + mock_stream.each do |fragment| sleep 0.002.seconds yield fragment if block_given? end + + @body end end @@ -64,8 +67,8 @@ RSpec.describe Gitlab::HTTP do before do stub_const("#{described_class}::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds) - WebMock.stub_request(:post, /.*/).to_return do |request| - { body: %w(a b), status: 200 } + WebMock.stub_request(:post, /.*/).to_return do + { body: "chunk-1 chunk-2", status: 200 } end end diff --git a/spec/lib/gitlab/import/errors_spec.rb b/spec/lib/gitlab/import/errors_spec.rb index 3b45af0618b..21d96601609 100644 --- a/spec/lib/gitlab/import/errors_spec.rb +++ b/spec/lib/gitlab/import/errors_spec.rb @@ -39,6 +39,7 @@ RSpec.describe Gitlab::Import::Errors, feature_category: :importers do "Noteable can't be blank", "Author can't be blank", "Project does not match noteable project", + "Namespace can't be blank", "User can't be blank", "Name is not a valid emoji name" ) diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 5bbb95b3ea5..d337a37c69f 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -85,6 +85,7 @@ events: notes: - award_emoji - project +- namespace - noteable - author - updated_by @@ -103,6 +104,7 @@ note_metadata: - note - email_participant commit_notes: +- namespace - award_emoji - noteable - author @@ -621,6 +623,7 @@ project: - project_members - project_repository - users +- maintainers - requesters - namespace_members - namespace_requesters @@ -690,7 +693,6 @@ project: - pool_repository - kubernetes_namespaces - error_tracking_setting -- metrics_setting - gitlab_slack_application_integration - github_integration - protected_environments @@ -738,6 +740,7 @@ project: - project_registry - packages - package_files +- package_protection_rules - rpm_repository_files - npm_metadata_caches - packages_cleanup_policy @@ -823,6 +826,7 @@ project: - project_state - security_policy_bots - target_branch_rules +- organization award_emoji: - awardable - user @@ -872,8 +876,6 @@ suggestions: - note diff_note_positions: - note -metrics_setting: -- project protected_environments: - project - group diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb index 8089b40cae8..08abd7908d2 100644 --- a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb +++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb @@ -97,7 +97,6 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter, feature_category: :imp :user | true :author | false :ci_cd_settings | true - :metrics_setting | true :project_badges | 
true :pipeline_schedules | true :error_tracking_setting | true diff --git a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb index e42a1d0ff8b..13d94fdb6fe 100644 --- a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb +++ b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb @@ -27,8 +27,8 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category expect { saver.execute }.to change(project.issues, :count).by(1) end - context 'when subrelation is present' do - let(:notes) { build_list(:note, 6, project: project, importing: true) } + context 'when subrelation collection is present' do + let(:notes) { build_list(:note, 2, project: project, importing: true) } let(:relation_object) { build(:issue, project: project, notes: notes) } let(:relation_definition) { { 'notes' => {} } } @@ -39,7 +39,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category saver.execute issue = project.issues.last - expect(issue.notes.count).to eq(6) + expect(issue.notes.count).to eq(2) end end @@ -58,25 +58,10 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category end end - context 'when subrelation collection count is small' do - let(:note) { build(:note, project: project, importing: true) } - let(:relation_object) { build(:issue, project: project, notes: [note]) } - let(:relation_definition) { { 'notes' => {} } } - - it 'saves subrelation as part of the relation object itself' do - expect(relation_object.notes).not_to receive(:<<) - - saver.execute - - issue = project.issues.last - expect(issue.notes.count).to eq(1) - end - end - context 'when some subrelations are invalid' do - let(:notes) { build_list(:note, 5, project: project, importing: true) } + let(:note) { build(:note, project: project, importing: true) } let(:invalid_note) { build(:note) } - let(:relation_object) { build(:issue, project: project, notes: notes + [invalid_note]) } + let(:relation_object) { build(:issue, project: project, notes: [note, invalid_note]) } let(:relation_definition) { { 'notes' => {} } } it 'saves valid subrelations and logs invalid subrelation' do @@ -88,7 +73,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category issue = project.issues.last expect(invalid_note.persisted?).to eq(false) - expect(issue.notes.count).to eq(5) + expect(issue.notes.count).to eq(1) end context 'when invalid subrelation can still be persisted' do @@ -112,14 +97,14 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category let(:relation_key) { 'labels' } let(:relation_definition) { { 'priorities' => {} } } let(:importable) { create(:group) } - let(:valid_priorities) { build_list(:label_priority, 5, importing: true) } + let(:valid_priorities) { [build(:label_priority, importing: true)] } let(:invalid_priority) { build(:label_priority, priority: -1) } let(:relation_object) { build(:group_label, group: importable, title: 'test', priorities: valid_priorities + [invalid_priority]) } it 'saves relation without invalid subrelations' do saver.execute - expect(importable.labels.last.priorities.count).to eq(5) + expect(importable.labels.last.priorities.count).to eq(1) end end end diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb index 8ed3a60d7fc..76a35d07c7f 100644 --- a/spec/lib/gitlab/import_export/command_line_util_spec.rb +++ 
b/spec/lib/gitlab/import_export/command_line_util_spec.rb @@ -203,7 +203,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe it 'throws a blocked url error' do Tempfile.create('test') do |file| - expect { subject.download(url, file.path) }.to raise_error((Gitlab::HTTP::BlockedUrlError)) + expect { subject.download(url, file.path) }.to raise_error(Gitlab::HTTP::BlockedUrlError) end end diff --git a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb index aceea70be92..04e25dee905 100644 --- a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb +++ b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb @@ -13,13 +13,13 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c FileUtils.rm(filepath) end - subject { described_class.new(archive_path: filepath, max_bytes: max_bytes) } + subject { described_class.new(archive_path: filepath) } describe '#valid?' do - let(:max_bytes) { 1 } - context 'when file does not exceed allowed decompressed size' do - let(:max_bytes) { 20 } + before do + stub_application_setting(max_decompressed_archive_size: 20) + end it 'returns true' do expect(subject.valid?).to eq(true) @@ -35,6 +35,10 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c end context 'when file exceeds allowed decompressed size' do + before do + stub_application_setting(max_decompressed_archive_size: 0.000001) + end + it 'logs error message returns false' do expect(Gitlab::Import::Logger) .to receive(:info) @@ -93,7 +97,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c end context 'when timeout occurs' do - let(:error_message) { 'Timeout reached during archive decompression' } + let(:error_message) { 'Timeout of 210 seconds reached during archive decompression' } let(:exception) { Timeout::Error } include_examples 'logs raised exception and terminates validator process group' diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb index d449446d7be..ef118d2987c 100644 --- a/spec/lib/gitlab/import_export/file_importer_spec.rb +++ b/spec/lib/gitlab/import_export/file_importer_spec.rb @@ -198,8 +198,7 @@ RSpec.describe Gitlab::ImportExport::FileImporter, feature_category: :importers context 'when validate_import_decompressed_archive_size feature flag is enabled' do before do stub_feature_flags(validate_import_decompressed_archive_size: true) - - allow(Gitlab::ImportExport::DecompressedArchiveSizeValidator).to receive(:max_bytes).and_return(1) + stub_application_setting(max_decompressed_archive_size: 0.000001) end it 'returns false and sets an error on shared' do diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb index 1d3fc764b50..09a2417ce1e 100644 --- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb +++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb @@ -48,7 +48,6 @@ RSpec.describe 'Test coverage of the Project Import', feature_category: :importe project.ci_pipelines.notes.events.push_event_payload project.protected_branches.unprotect_access_levels project.prometheus_metrics - project.metrics_setting project.boards.lists.label.priorities project.service_desk_setting project.security_setting diff --git a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb 
b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb index 452d63d548e..486d179ae05 100644 --- a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb +++ b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" -RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter do +RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter, feature_category: :importers do include ImportExport::CommonUtil let(:path) { "#{Dir.tmpdir}/ndjson_writer_spec/tree" } @@ -35,13 +35,18 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter do end context "when single relation is already serialized" do - it "raise exception" do + it "appends to the existing file" do values = [{ "key" => "value_1", "key_1" => "value_1" }, { "key" => "value_2", "key_1" => "value_2" }] relation = "relation" file_path = File.join(path, exportable_path, "#{relation}.ndjson") subject.write_relation(exportable_path, relation, values[0]) - expect { subject.write_relation(exportable_path, relation, values[1]) }.to raise_exception("The #{file_path} already exist") + expect { subject.write_relation(exportable_path, relation, values[1]) }.not_to raise_exception + + file_data = File.read(file_path) + + expect(file_data).to include(values[0].to_json) + expect(file_data).to include(values[1].to_json) end end end diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb index f4c9189030b..e5058e029c8 100644 --- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb +++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb @@ -70,8 +70,9 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer, feature_category create_list(:issue, 3, :with_desc_relative_position, project: exportable ) # ascending ids, descending position end - it 'calls json_writer.write_relation_array with proper params' do + it 'calls json_writer.write_relation_array with proper params and clears SafeRequestStore' do expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(issue.to_json)) + expect(Gitlab::SafeRequestStore).to receive(:clear!) 
subject.execute end diff --git a/spec/lib/gitlab/import_export/project/export_task_spec.rb b/spec/lib/gitlab/import_export/project/export_task_spec.rb index 95971d08175..0837874526a 100644 --- a/spec/lib/gitlab/import_export/project/export_task_spec.rb +++ b/spec/lib/gitlab/import_export/project/export_task_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe Gitlab::ImportExport::Project::ExportTask, :silence_stdout do +RSpec.describe Gitlab::ImportExport::Project::ExportTask, :silence_stdout, feature_category: :importers do let_it_be(:username) { 'root' } let(:namespace_path) { username } let_it_be(:user) { create(:user, username: username) } diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index 47003707172..c83cfb0e2f5 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -467,7 +467,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i aggregate_failures do expect(release.tag).to eq('release-1.0') - expect(release.author_id).to eq(User.select(:id).ghost.id) + expect(release.author_id).to eq(Users::Internal.ghost.id) end end diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb index b243780a020..db23e3b1fd4 100644 --- a/spec/lib/gitlab/import_sources_spec.rb +++ b/spec/lib/gitlab/import_sources_spec.rb @@ -58,7 +58,7 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do describe '.importer' do import_sources = { 'github' => Gitlab::GithubImport::ParallelImporter, - 'bitbucket' => Gitlab::BitbucketImport::Importer, + 'bitbucket' => Gitlab::BitbucketImport::ParallelImporter, 'bitbucket_server' => Gitlab::BitbucketServerImport::ParallelImporter, 'fogbugz' => Gitlab::FogbugzImport::Importer, 'git' => nil, @@ -72,45 +72,37 @@ RSpec.describe Gitlab::ImportSources, feature_category: :importers do expect(described_class.importer(name)).to eq(klass) end end - - context 'when flag is disabled' do - before do - stub_feature_flags(bitbucket_server_parallel_importer: false) - end - - it 'returns Gitlab::BitbucketServerImport::Importer when given bitbucket_server' do - expect(described_class.importer('bitbucket_server')).to eq(Gitlab::BitbucketServerImport::Importer) - end - end end describe '.import_table' do subject { described_class.import_table } - it 'returns the ParallelImporter for Bitbucket server' do - is_expected.to include( - described_class::ImportSource.new( - 'bitbucket_server', - 'Bitbucket Server', - Gitlab::BitbucketServerImport::ParallelImporter - ) - ) - end - - context 'when flag is disabled' do - before do - stub_feature_flags(bitbucket_server_parallel_importer: false) - end - - it 'returns the legacy Importer for Bitbucket server' do + describe 'Bitbucket cloud' do + it 'returns the ParallelImporter' do is_expected.to include( described_class::ImportSource.new( - 'bitbucket_server', - 'Bitbucket Server', - Gitlab::BitbucketServerImport::Importer + 'bitbucket', + 'Bitbucket Cloud', + Gitlab::BitbucketImport::ParallelImporter ) ) end + + context 'when flag is disabled' do + before do + stub_feature_flags(bitbucket_parallel_importer: false) + end + + it 'returns the legacy Importer' do + is_expected.to include( + described_class::ImportSource.new( + 'bitbucket', + 'Bitbucket Cloud', + Gitlab::BitbucketImport::Importer + ) + ) + end + end end end @@ -134,7 +126,7 @@ RSpec.describe Gitlab::ImportSources, 
feature_category: :importers do end describe 'imports_repository? checker' do - let(:allowed_importers) { %w[github gitlab_project bitbucket_server] } + let(:allowed_importers) { %w[github gitlab_project bitbucket bitbucket_server] } it 'fails if any importer other than the allowed ones implements this method' do current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) } diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb index 6271885d80e..4168fdf5425 100644 --- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb +++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb @@ -62,6 +62,7 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :request_store, featur it 'counts successful pipelined requests' do expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original + expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original redis_store_class.with do |redis| redis.pipelined do |pipeline| diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb index af2da8f20c0..b000f55e739 100644 --- a/spec/lib/gitlab/job_waiter_spec.rb +++ b/spec/lib/gitlab/job_waiter_spec.rb @@ -4,13 +4,21 @@ require 'spec_helper' RSpec.describe Gitlab::JobWaiter, :redis, feature_category: :shared do describe '.notify' do - it 'pushes the jid to the named queue' do - key = described_class.new.key + let(:key) { described_class.new.key } + it 'pushes the jid to the named queue', :freeze_time do described_class.notify(key, 123) Gitlab::Redis::SharedState.with do |redis| - expect(redis.ttl(key)).to be > 0 + expect(redis.ttl(key)).to eq(described_class::DEFAULT_TTL) + end + end + + it 'can be passed a custom TTL', :freeze_time do + described_class.notify(key, 123, ttl: 5.minutes) + + Gitlab::Redis::SharedState.with do |redis| + expect(redis.ttl(key)).to eq(5.minutes.to_i) end end end @@ -23,6 +31,32 @@ RSpec.describe Gitlab::JobWaiter, :redis, feature_category: :shared do end end + describe '.delete_key' do + let(:key) { described_class.generate_key } + + it 'deletes the key' do + described_class.notify(key, '1') + described_class.delete_key(key) + + Gitlab::Redis::SharedState.with do |redis| + expect(redis.llen(key)).to eq(0) + end + end + + context 'when key is not a JobWaiter key' do + let(:key) { 'foo' } + + it 'does not delete the key' do + described_class.notify(key, '1') + described_class.delete_key(key) + + Gitlab::Redis::SharedState.with do |redis| + expect(redis.llen(key)).to eq(1) + end + end + end + end + describe '#wait' do let(:waiter) { described_class.new(2) } diff --git a/spec/lib/gitlab/manifest_import/metadata_spec.rb b/spec/lib/gitlab/manifest_import/metadata_spec.rb index c55b407088d..011371782fe 100644 --- a/spec/lib/gitlab/manifest_import/metadata_spec.rb +++ b/spec/lib/gitlab/manifest_import/metadata_spec.rb @@ -46,16 +46,6 @@ RSpec.describe Gitlab::ManifestImport::Metadata, :clean_gitlab_redis_shared_stat expect(status.repositories).to eq(repositories) end - - it 'reads non-hash-tagged keys if hash-tag keys are missing' do - status = described_class.new(user) - - Gitlab::Redis::SharedState.with do |redis| - redis.set(repositories_key, Gitlab::Json.dump(repositories)) - end - - expect(status.repositories).to eq(repositories) - end end describe '#group_id' do @@ -73,13 +63,5 @@ RSpec.describe Gitlab::ManifestImport::Metadata, 
:clean_gitlab_redis_shared_stat expect(status.group_id).to eq(3) end - - it 'reads non-hash-tagged keys if hash-tag keys are missing' do - status = described_class.new(user) - - Gitlab::Redis::SharedState.with { |redis| redis.set(group_id_key, 2) } - - expect(status.group_id).to eq(2) - end end end diff --git a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb deleted file mode 100644 index 8c2edc85c35..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb +++ /dev/null @@ -1,88 +0,0 @@ -# frozen_string_literal: true -# rubocop:disable Style/RedundantFetchBlock - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Cache, :use_clean_rails_memory_store_caching do - let_it_be(:project1) { build_stubbed(:project) } - let_it_be(:project2) { build_stubbed(:project) } - - let(:project1_key1) { "#{project1.id}_key1" } - let(:project1_key2) { "#{project1.id}_key2" } - let(:project2_key1) { "#{project2.id}_key1" } - - let(:cache1) { described_class.for(project1) } - let(:cache2) { described_class.for(project2) } - - before do - cache1.fetch(project1_key1) { 'data1' } - cache1.fetch(project1_key2) { 'data2' } - cache2.fetch(project2_key1) { 'data3' } - end - - describe '.fetch' do - it 'stores data correctly' do - described_class.fetch('key1') { 'data1' } - described_class.fetch('key2') { 'data2' } - - expect(described_class.fetch('key1')).to eq('data1') - expect(described_class.fetch('key2')).to eq('data2') - end - end - - describe '.for' do - it 'returns a new instance' do - expect(described_class.for(project1)).to be_instance_of(described_class) - end - end - - describe '#fetch' do - it 'stores data correctly' do - expect(cache1.fetch(project1_key1)).to eq('data1') - expect(cache1.fetch(project1_key2)).to eq('data2') - expect(cache2.fetch(project2_key1)).to eq('data3') - end - end - - describe '#delete_all!' do - it 'deletes keys of the given project', :aggregate_failures do - cache1.delete_all! - - expect(Rails.cache.exist?(project1_key1)).to be(false) - expect(Rails.cache.exist?(project1_key2)).to be(false) - expect(cache2.fetch(project2_key1)).to eq('data3') - - cache2.delete_all! - - expect(Rails.cache.exist?(project2_key1)).to be(false) - end - - it 'does not fail when nothing to delete' do - project3 = build_stubbed(:project) - cache3 = described_class.for(project3) - - expect { cache3.delete_all! }.not_to raise_error - end - end - - context 'multiple fetches and deletes' do - specify :aggregate_failures do - cache1.delete_all! - - expect(Rails.cache.exist?(project1_key1)).to be(false) - expect(Rails.cache.exist?(project1_key2)).to be(false) - - cache1.fetch("#{project1.id}_key3") { 'data1' } - cache1.fetch("#{project1.id}_key4") { 'data2' } - - expect(cache1.fetch("#{project1.id}_key3")).to eq('data1') - expect(cache1.fetch("#{project1.id}_key4")).to eq('data2') - - cache1.delete_all! 
- - expect(Rails.cache.exist?("#{project1.id}_key3")).to be(false) - expect(Rails.cache.exist?("#{project1.id}_key4")).to be(false) - end - end -end -# rubocop:enable Style/RedundantFetchBlock diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb deleted file mode 100644 index 11b587e4905..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Processor do - include MetricsDashboardHelpers - - let(:project) { build(:project) } - let(:environment) { create(:environment, project: project) } - let(:dashboard_yml) { load_sample_dashboard } - - describe 'process' do - let(:sequence) do - [ - Gitlab::Metrics::Dashboard::Stages::UrlValidator - ] - end - - let(:process_params) { [project, dashboard_yml, sequence, { environment: environment }] } - let(:dashboard) { described_class.new(*process_params).process } - - context 'when the dashboard is not present' do - let(:dashboard_yml) { nil } - - it 'returns nil' do - expect(dashboard).to be_nil - end - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb deleted file mode 100644 index a2c9906c0e9..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb +++ /dev/null @@ -1,54 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::RepoDashboardFinder do - include MetricsDashboardHelpers - - let_it_be(:project) { create(:project) } - - describe '.list_dashboards' do - it 'deletes dashboard cache entries' do - cache = instance_double(Gitlab::Metrics::Dashboard::Cache) - allow(Gitlab::Metrics::Dashboard::Cache).to receive(:for).and_return(cache) - - expect(cache).to receive(:delete_all!) 
- - described_class.list_dashboards(project) - end - - it 'returns empty array when there are no dashboards' do - expect(described_class.list_dashboards(project)).to eq([]) - end - - context 'when there are project dashboards available' do - let_it_be(:dashboard_path) { '.gitlab/dashboards/test.yml' } - let_it_be(:project) { project_with_dashboard(dashboard_path) } - - it 'returns the dashboard list' do - expect(described_class.list_dashboards(project)).to contain_exactly(dashboard_path) - end - end - end - - describe '.read_dashboard' do - it 'raises error when dashboard does not exist' do - dashboard_path = '.gitlab/dashboards/test.yml' - - expect { described_class.read_dashboard(project, dashboard_path) }.to raise_error( - Gitlab::Metrics::Dashboard::Errors::NOT_FOUND_ERROR - ) - end - - context 'when there are project dashboards available' do - let_it_be(:dashboard_path) { '.gitlab/dashboards/test.yml' } - let_it_be(:project) { project_with_dashboard(dashboard_path) } - - it 'reads dashboard' do - expect(described_class.read_dashboard(project, dashboard_path)).to eq( - fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') - ) - end - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb deleted file mode 100644 index 83cf161c4e2..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/stages/url_validator_spec.rb +++ /dev/null @@ -1,101 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Stages::UrlValidator do - let(:project) { build_stubbed(:project) } - - describe '#transform!' do - context 'when the links contain a blocked url' do - let(:dashboard) do - { - dashboard: "Test Dashboard", - links: [ - { url: "http://1.1.1.1.1" }, - { url: "https://gitlab.com" }, - { url: "http://0.0.0.0" } - ], - panel_groups: [ - { - group: "Group A", - panels: [ - { - title: "Super Chart A1", - type: "area-chart", - y_label: "y_label", - metrics: [ - { - id: "metric_a1", - query_range: "query", - unit: "unit", - label: "Legend Label" - } - ], - links: [ - { url: "http://1.1.1.1.1" }, - { url: "https://gitlab.com" }, - { url: "http://0.0.0.0" } - ] - } - ] - } - ] - } - end - - let(:expected) do - [{ url: '' }, { url: 'https://gitlab.com' }, { url: 'http://0.0.0.0' }] - end - - let(:transform!) { described_class.new(project, dashboard, nil).transform! } - - before do - stub_env('RSPEC_ALLOW_INVALID_URLS', 'false') - stub_application_setting(allow_local_requests_from_web_hooks_and_services: true) - end - - context 'dashboard related links' do - it 'replaces the blocked url with an empty string' do - transform! - - expect(dashboard[:links]).to eq(expected) - end - end - - context 'chart links' do - it 'replaces the blocked url with an empty string' do - transform! - - result = dashboard.dig(:panel_groups, 0, :panels, 0, :links) - expect(result).to eq(expected) - end - end - - context 'when local requests are not allowed' do - before do - stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) - end - - let(:expected) do - [{ url: '' }, { url: 'https://gitlab.com' }, { url: '' }] - end - - it 'replaces the blocked url with an empty string' do - transform! 
- - expect(dashboard[:links]).to eq(expected) - end - end - - context 'when the links are an array of strings instead of hashes' do - before do - dashboard[:links] = dashboard[:links].map(&:values) - end - - it 'prevents an invalid link definition from erroring out' do - expect { transform! }.not_to raise_error - end - end - end - end -end diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb deleted file mode 100644 index a035cf02da4..00000000000 --- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb +++ /dev/null @@ -1,106 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Metrics::Dashboard::Url do - include Gitlab::Routing.url_helpers - - describe '#clusters_regex' do - let(:url) { Gitlab::Routing.url_helpers.namespace_project_cluster_url(*url_params) } - let(:url_params) do - [ - 'foo', - 'bar', - '1', - { - group: 'Cluster Health', - title: 'Memory Usage', - y_label: 'Memory 20(GiB)', - anchor: 'title' - } - ] - end - - let(:expected_params) do - { - 'url' => url, - 'namespace' => 'foo', - 'project' => 'bar', - 'cluster_id' => '1', - 'query' => '?group=Cluster+Health&title=Memory+Usage&y_label=Memory+20%28GiB%29', - 'anchor' => '#title' - } - end - - subject { described_class.clusters_regex } - - it_behaves_like 'regex which matches url when expected' - - context 'for metrics_dashboard route' do - let(:url) do - metrics_dashboard_namespace_project_cluster_url( - *url_params, cluster_type: :project, embedded: true, format: :json - ) - end - - let(:expected_params) do - { - 'url' => url, - 'namespace' => 'foo', - 'project' => 'bar', - 'cluster_id' => '1', - 'query' => '?cluster_type=project&embedded=true', - 'anchor' => nil - } - end - - it_behaves_like 'regex which matches url when expected' - end - end - - describe '#alert_regex' do - let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params) } - let(:url_params) do - [ - 'foo', - 'bar', - '1', - { - start: '2020-02-10T12:59:49.938Z', - end: '2020-02-10T20:59:49.938Z', - anchor: "anchor" - } - ] - end - - let(:expected_params) do - { - 'url' => url, - 'namespace' => 'foo', - 'project' => 'bar', - 'alert' => '1', - 'query' => "?end=2020-02-10T20%3A59%3A49.938Z&start=2020-02-10T12%3A59%3A49.938Z", - 'anchor' => '#anchor' - } - end - - subject { described_class.alert_regex } - - it_behaves_like 'regex which matches url when expected' - - it_behaves_like 'regex which matches url when expected' do - let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params, format: :json) } - - let(:expected_params) do - { - 'url' => url, - 'namespace' => 'foo', - 'project' => 'bar', - 'alert' => '1', - 'query' => nil, - 'anchor' => nil - } - end - end - end -end diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb index 57790ad78a8..85e8b366f29 100644 --- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb @@ -8,6 +8,32 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do it_behaves_like 'metrics sampler', 'DATABASE_SAMPLER' describe '#sample' do + let(:main_load_balancer) do + double(:main_load_balancer, host_list: main_host_list, configuration: main_configuration, primary_only?: false) + end + + let(:main_configuration) { double(:configuration, connection_specification_name: 'ActiveRecord::Base') } + 
let(:main_host_list) { double(:host_list, hosts: [main_replica_host]) } + let(:main_replica_host) { double(:host, pool: main_replica_pool, host: 'main-replica-host', port: 2345) } + let(:main_replica_pool) do + double(:main_replica_pool, db_config: double(:main_replica_db_config, name: 'main_replica'), stat: stats) + end + + let(:stats) do + { size: 123, connections: 100, busy: 10, dead: 5, idle: 85, waiting: 1 } + end + + let(:ci_load_balancer) do + double(:ci_load_balancer, host_list: ci_host_list, configuration: ci_configuration, primary_only?: false) + end + + let(:ci_configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') } + let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) } + let(:ci_replica_host) { double(:host, pool: ci_replica_pool, host: 'ci-replica-host', port: 3456) } + let(:ci_replica_pool) do + double(:ci_replica_pool, db_config: double(:ci_replica_db_config, name: 'ci_replica'), stat: stats) + end + let(:main_labels) do { class: 'ActiveRecord::Base', @@ -62,35 +88,9 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do end context 'when replica hosts are configured' do - let(:main_load_balancer) { ApplicationRecord.load_balancer } - let(:main_replica_host) { main_load_balancer.host } - - let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) } - let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') } - let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) } - let(:ci_replica_host) { double(:host, connection: ci_connection) } - let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) } - before do allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer) .and_return([main_load_balancer, ci_load_balancer].to_enum) - - allow(main_load_balancer).to receive(:primary_only?).and_return(false) - allow(ci_load_balancer).to receive(:primary_only?).and_return(false) - - allow(main_replica_host).to receive(:host).and_return('main-replica-host') - allow(ci_replica_host).to receive(:host).and_return('ci-replica-host') - - allow(main_replica_host).to receive(:port).and_return(2345) - allow(ci_replica_host).to receive(:port).and_return(3456) - - allow(Gitlab::Database).to receive(:db_config_name) - .with(main_replica_host.connection) - .and_return('main_replica') - - allow(Gitlab::Database).to receive(:db_config_name) - .with(ci_replica_host.connection) - .and_return('ci_replica') end it 'samples connection pool statistics for primaries and replicas' do @@ -117,35 +117,9 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do end context 'when the base model has replica connections' do - let(:main_load_balancer) { ApplicationRecord.load_balancer } - let(:main_replica_host) { main_load_balancer.host } - - let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) } - let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') } - let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) } - let(:ci_replica_host) { double(:host, connection: ci_connection) } - let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) } - before do allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer) .and_return([main_load_balancer, ci_load_balancer].to_enum) - - allow(main_load_balancer).to receive(:primary_only?).and_return(false) - 
allow(ci_load_balancer).to receive(:primary_only?).and_return(false) - - allow(main_replica_host).to receive(:host).and_return('main-replica-host') - allow(ci_replica_host).to receive(:host).and_return('ci-replica-host') - - allow(main_replica_host).to receive(:port).and_return(2345) - allow(ci_replica_host).to receive(:port).and_return(3456) - - allow(Gitlab::Database).to receive(:db_config_name) - .with(main_replica_host.connection) - .and_return('main_replica') - - allow(Gitlab::Database).to receive(:db_config_name) - .with(ci_replica_host.connection) - .and_return('ci_replica') end it 'still records the replica metrics' do diff --git a/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb index 5394cea64af..b85256f32c5 100644 --- a/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb +++ b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb @@ -3,7 +3,7 @@ require 'fast_spec_helper' require 'action_dispatch' require 'rack' -require 'request_store' +require 'gitlab/safe_request_store' RSpec.describe Gitlab::Middleware::WebhookRecursionDetection do let(:app) { double(:app) } diff --git a/spec/lib/gitlab/observability_spec.rb b/spec/lib/gitlab/observability_spec.rb index 61f69a0171a..04c35f0ee3a 100644 --- a/spec/lib/gitlab/observability_spec.rb +++ b/spec/lib/gitlab/observability_spec.rb @@ -40,16 +40,10 @@ RSpec.describe Gitlab::Observability, feature_category: :error_tracking do it { is_expected.to eq("#{described_class.observability_url}/v1/auth/start") } end - describe '.tracing_url' do - subject { described_class.tracing_url(project) } - - it { is_expected.to eq("#{described_class.observability_url}/query/#{group.id}/#{project.id}/v1/traces") } - end - describe '.provisioning_url' do subject { described_class.provisioning_url(project) } - it { is_expected.to eq(described_class.observability_url.to_s) } + it { is_expected.to eq("#{described_class.observability_url}/v3/tenant/#{project.id}") } end describe '.build_full_url' do @@ -169,27 +163,6 @@ RSpec.describe Gitlab::Observability, feature_category: :error_tracking do end end - describe '.tracing_enabled?' do - let_it_be(:project) { create(:project, :repository) } - - it 'returns true if feature is enabled globally' do - expect(described_class.tracing_enabled?(project)).to eq(true) - end - - it 'returns true if feature is enabled for the project' do - stub_feature_flags(observability_tracing: false) - stub_feature_flags(observability_tracing: project) - - expect(described_class.tracing_enabled?(project)).to eq(true) - end - - it 'returns false if feature is disabled globally' do - stub_feature_flags(observability_tracing: false) - - expect(described_class.tracing_enabled?(project)).to eq(false) - end - end - describe '.allowed_for_action?' do let(:group) { build_stubbed(:group) } let(:user) { build_stubbed(:user) } diff --git a/spec/lib/gitlab/other_markup_spec.rb b/spec/lib/gitlab/other_markup_spec.rb index 74e2c5e26c1..34f1e0cfbc5 100644 --- a/spec/lib/gitlab/other_markup_spec.rb +++ b/spec/lib/gitlab/other_markup_spec.rb @@ -2,9 +2,48 @@ require 'spec_helper' -RSpec.describe Gitlab::OtherMarkup do +RSpec.describe Gitlab::OtherMarkup, feature_category: :wiki do let(:context) { {} } + context 'when restructured text' do + it 'renders' do + input = <<~RST + Header + ====== + + *emphasis*; **strong emphasis**; `interpreted text` + RST + + output = <<~HTML +
+        <h1>Header</h1>
+        <p><em>emphasis</em>; <strong>strong emphasis</strong>; <cite>interpreted text</cite></p>
+ HTML + + expect(render('unimportant_name.rst', input, context)).to include(output.strip) + end + + context 'when PlantUML is enabled' do + it 'generates the diagram' do + Gitlab::CurrentSettings.current_application_settings.update!(plantuml_enabled: true, plantuml_url: 'https://plantuml.com/plantuml') + + input = <<~RST + .. plantuml:: + :caption: Caption with **bold** and *italic* + + Bob -> Alice: hello + Alice -> Bob: hi + RST + + output = <<~HTML + +
+          Caption with <strong>bold</strong> and <em>italic</em>
+ HTML + + expect(render('unimportant_name.rst', input, context)).to include(output.strip) + end + end + end + context 'XSS Checks' do links = { 'links' => { diff --git a/spec/lib/gitlab/pages/cache_control_spec.rb b/spec/lib/gitlab/pages/cache_control_spec.rb deleted file mode 100644 index 72240f52580..00000000000 --- a/spec/lib/gitlab/pages/cache_control_spec.rb +++ /dev/null @@ -1,88 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Pages::CacheControl, feature_category: :pages do - RSpec.shared_examples 'cache_control' do |type| - it { expect(subject.cache_key).to match(/pages_domain_for_#{type}_1_*/) } - - describe '#clear_cache', :use_clean_rails_redis_caching do - before do - Rails.cache.write("pages_domain_for_#{type}_1", ['settings-hash']) - Rails.cache.write("pages_domain_for_#{type}_1_settings-hash", 'payload') - end - - it 'clears the cache' do - cached_keys = [ - "pages_domain_for_#{type}_1_settings-hash", - "pages_domain_for_#{type}_1" - ] - - expect(::Gitlab::AppLogger) - .to receive(:info) - .with( - message: 'clear pages cache', - pages_keys: cached_keys, - pages_type: type, - pages_id: 1 - ) - - expect(Rails.cache) - .to receive(:delete_multi) - .with(cached_keys) - - subject.clear_cache - end - end - end - - describe '.for_namespace' do - subject(:cache_control) { described_class.for_namespace(1) } - - it_behaves_like 'cache_control', :namespace - end - - describe '.for_domain' do - subject(:cache_control) { described_class.for_domain(1) } - - it_behaves_like 'cache_control', :domain - end - - describe '#cache_key' do - it 'does not change the pages config' do - expect { described_class.new(type: :domain, id: 1).cache_key } - .not_to change(Gitlab.config, :pages) - end - - it 'is based on pages settings' do - access_control = Gitlab.config.pages.access_control - cache_key = described_class.new(type: :domain, id: 1).cache_key - - stub_config(pages: { access_control: !access_control }) - - expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key) - end - - it 'is based on the force_pages_access_control settings' do - force_pages_access_control = ::Gitlab::CurrentSettings.force_pages_access_control - cache_key = described_class.new(type: :domain, id: 1).cache_key - - ::Gitlab::CurrentSettings.force_pages_access_control = !force_pages_access_control - - expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key) - end - - it 'caches the application settings hash' do - expect(Rails.cache) - .to receive(:write) - .with('pages_domain_for_domain_1', kind_of(Set)) - - described_class.new(type: :domain, id: 1).cache_key - end - end - - it 'fails with invalid type' do - expect { described_class.new(type: :unknown, id: nil) } - .to raise_error(ArgumentError, 'type must be :namespace or :domain') - end -end diff --git a/spec/lib/gitlab/pages/virtual_host_finder_spec.rb b/spec/lib/gitlab/pages/virtual_host_finder_spec.rb index 49eee772f8d..8c34968bbfc 100644 --- a/spec/lib/gitlab/pages/virtual_host_finder_spec.rb +++ b/spec/lib/gitlab/pages/virtual_host_finder_spec.rb @@ -40,23 +40,9 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do it 'returns the virual domain when there are pages deployed for the project' do expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.cache_key).to match(/pages_domain_for_domain_#{pages_domain.id}_/) expect(virtual_domain.lookup_paths.length).to eq(1) expect(virtual_domain.lookup_paths.first.project_id).to 
eq(project.id) end - - context 'when :cache_pages_domain_api is disabled' do - before do - stub_feature_flags(cache_pages_domain_api: false) - end - - it 'returns the virual domain when there are pages deployed for the project' do - expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.cache_key).to be_nil - expect(virtual_domain.lookup_paths.length).to eq(1) - expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id) - end - end end end @@ -76,23 +62,8 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/) expect(virtual_domain.lookup_paths.length).to eq(0) end - - context 'when :cache_pages_domain_api is disabled' do - before do - stub_feature_flags(cache_pages_domain_api: false) - end - - it 'returns the virual domain with no lookup_paths' do - virtual_domain = described_class.new("#{project.namespace.path}.example.com".downcase).execute - - expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.cache_key).to be_nil - expect(virtual_domain.lookup_paths.length).to eq(0) - end - end end context 'when there are pages deployed for the project' do @@ -111,7 +82,6 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/) expect(virtual_domain.lookup_paths.length).to eq(1) expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id) end @@ -120,25 +90,9 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do virtual_domain = described_class.new("#{project.namespace.path}.Example.com").execute expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/) expect(virtual_domain.lookup_paths.length).to eq(1) expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id) end - - context 'when :cache_pages_domain_api is disabled' do - before_all do - stub_feature_flags(cache_pages_domain_api: false) - end - - it 'returns the virual domain when there are pages deployed for the project' do - virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute - - expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.cache_key).to be_nil - expect(virtual_domain.lookup_paths.length).to eq(1) - expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id) - end - end end end @@ -187,18 +141,6 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id) end end - - context 'when :cache_pages_domain_api is disabled' do - before do - stub_feature_flags(cache_pages_domain_api: false) - end - - it 'returns the virual domain when there are pages deployed for the project' do - expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain) - expect(virtual_domain.lookup_paths.length).to eq(1) - expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id) - end - end end end diff --git 
a/spec/lib/gitlab/pages_spec.rb b/spec/lib/gitlab/pages_spec.rb index 9f85efd56e6..c20956788ac 100644 --- a/spec/lib/gitlab/pages_spec.rb +++ b/spec/lib/gitlab/pages_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Pages do +RSpec.describe Gitlab::Pages, feature_category: :pages do using RSpec::Parameterized::TableSyntax let(:pages_secret) { SecureRandom.random_bytes(Gitlab::Pages::SECRET_LENGTH) } @@ -48,4 +48,89 @@ RSpec.describe Gitlab::Pages do it { is_expected.to eq(result) } end end + + describe '.multiple_versions_enabled_for?' do + context 'when project is nil' do + it 'returns false' do + expect(described_class.multiple_versions_enabled_for?(nil)).to eq(false) + end + end + + context 'when a project is given' do + let_it_be(:project) { create(:project) } + + where(:setting, :feature_flag, :license, :result) do + false | false | false | false + false | false | true | false + false | true | false | false + false | true | true | false + true | false | false | false + true | false | true | false + true | true | false | false + true | true | true | true + end + + with_them do + let_it_be(:project) { create(:project) } + + subject { described_class.multiple_versions_enabled_for?(project) } + + before do + stub_licensed_features(pages_multiple_versions: license) + stub_feature_flags(pages_multiple_versions_setting: feature_flag) + project.project_setting.update!(pages_multiple_versions_enabled: setting) + end + + # this feature is only available in EE + it { is_expected.to eq(result && Gitlab.ee?) } + end + end + end + + describe '#add_unique_domain_to' do + let(:project) { build(:project) } + + context 'when pages is not enabled' do + before do + stub_pages_setting(enabled: false) + end + + it 'does not set pages unique domain' do + expect(Gitlab::Pages::RandomDomain).not_to receive(:generate) + + described_class.add_unique_domain_to(project) + + expect(project.project_setting.pages_unique_domain_enabled).to eq(false) + expect(project.project_setting.pages_unique_domain).to eq(nil) + end + end + + context 'when pages is enabled' do + before do + stub_pages_setting(enabled: true) + end + + it 'enables unique domain by default' do + allow(Gitlab::Pages::RandomDomain) + .to receive(:generate) + .and_return('unique-domain') + + described_class.add_unique_domain_to(project) + + expect(project.project_setting.pages_unique_domain_enabled).to eq(true) + expect(project.project_setting.pages_unique_domain).to eq('unique-domain') + end + + context 'when project already have a unique domain' do + it 'does not changes the original unique domain' do + expect(Gitlab::Pages::RandomDomain).not_to receive(:generate) + project.project_setting.update!(pages_unique_domain: 'unique-domain') + + described_class.add_unique_domain_to(project.reload) + + expect(project.project_setting.pages_unique_domain).to eq('unique-domain') + end + end + end + end end diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb index 7cee65c13f7..4128f745ce7 100644 --- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb +++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb @@ -6,20 +6,52 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do subject { described_class } describe '.available_for_type?' 
do - it 'returns true for Group' do - expect(subject.available_for_type?(Group.all)).to be_truthy - end + context 'with api_keyset_pagination_multi_order FF disabled' do + before do + stub_feature_flags(api_keyset_pagination_multi_order: false) + end - it 'returns true for Ci::Build' do - expect(subject.available_for_type?(Ci::Build.all)).to be_truthy - end + it 'returns true for Group' do + expect(subject.available_for_type?(Group.all)).to be_truthy + end - it 'returns true for Packages::BuildInfo' do - expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy + it 'returns true for Ci::Build' do + expect(subject.available_for_type?(Ci::Build.all)).to be_truthy + end + + it 'returns true for Packages::BuildInfo' do + expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy + end + + it 'return false for User' do + expect(subject.available_for_type?(User.all)).to be_falsey + end end - it 'return false for other types of relations' do - expect(subject.available_for_type?(User.all)).to be_falsey + context 'with api_keyset_pagination_multi_order FF enabled' do + before do + stub_feature_flags(api_keyset_pagination_multi_order: true) + end + + it 'returns true for Group' do + expect(subject.available_for_type?(Group.all)).to be_truthy + end + + it 'returns true for Ci::Build' do + expect(subject.available_for_type?(Ci::Build.all)).to be_truthy + end + + it 'returns true for Packages::BuildInfo' do + expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy + end + + it 'returns true for User' do + expect(subject.available_for_type?(User.all)).to be_truthy + end + + it 'return false for other types of relations' do + expect(subject.available_for_type?(Issue.all)).to be_falsey + end end end @@ -58,7 +90,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do end it 'return false for other types of relations' do - expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey + expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey end @@ -68,16 +100,48 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do let(:order_by) { :id } let(:sort) { :desc } - it 'returns true for Ci::Build' do - expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy - end + context 'with api_keyset_pagination_multi_order FF disabled' do + before do + stub_feature_flags(api_keyset_pagination_multi_order: false) + end + + it 'returns true for Ci::Build' do + expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy + end + + it 'returns true for AuditEvent' do + expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy + end - it 'returns true for AuditEvent' do - expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy + it 'returns true for Packages::BuildInfo' do + expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy + end + + it 'returns false for User' do + expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey + end end - it 'returns true for Packages::BuildInfo' do - expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy + context 'with api_keyset_pagination_multi_order FF enabled' do + before do + 
stub_feature_flags(api_keyset_pagination_multi_order: true) + end + + it 'returns true for Ci::Build' do + expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy + end + + it 'returns true for AuditEvent' do + expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy + end + + it 'returns true for Packages::BuildInfo' do + expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy + end + + it 'returns true for User' do + expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy + end end end @@ -90,7 +154,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do end it 'return false for other types of relations' do - expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey + expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey end end end diff --git a/spec/lib/gitlab/patch/redis_cache_store_spec.rb b/spec/lib/gitlab/patch/redis_cache_store_spec.rb index 5a674d443bb..21c256fdbbe 100644 --- a/spec/lib/gitlab/patch/redis_cache_store_spec.rb +++ b/spec/lib/gitlab/patch/redis_cache_store_spec.rb @@ -34,36 +34,58 @@ RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, f end context 'when reading large amount of keys' do - it 'batches get into pipelines of 100' do - cache.redis.with do |redis| - normal_cluster = !redis.is_a?(Gitlab::Redis::MultiStore) && Gitlab::Redis::ClusterUtil.cluster?(redis) - multistore_cluster = redis.is_a?(Gitlab::Redis::MultiStore) && - ::Gitlab::Redis::ClusterUtil.cluster?(redis.default_store) + let(:input_size) { 2000 } + let(:chunk_size) { 1000 } + + shared_examples 'read large amount of keys' do + it 'breaks the input into 2 chunks for redis cluster' do + cache.redis.with do |redis| + normal_cluster = !redis.is_a?(Gitlab::Redis::MultiStore) && Gitlab::Redis::ClusterUtil.cluster?(redis) + multistore_cluster = redis.is_a?(Gitlab::Redis::MultiStore) && + ::Gitlab::Redis::ClusterUtil.cluster?(redis.default_store) + + if normal_cluster || multistore_cluster + expect_next_instances_of(Gitlab::Redis::CrossSlot::Pipeline, 2) do |pipeline| + obj = instance_double(::Redis) + expect(pipeline).to receive(:pipelined).and_yield(obj) + expect(obj).to receive(:get).exactly(chunk_size).times + end + else + expect(redis).to receive(:mget).and_call_original + end + end - if normal_cluster || multistore_cluster - expect(redis).to receive(:pipelined).at_least(2).and_call_original - else - expect(redis).to receive(:mget).and_call_original + Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do + cache.read_multi(*Array.new(input_size) { |i| i }) end end + end - Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do - cache.read_multi(*Array.new(101) { |i| i }) + context 'when GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT is smaller than the default' do + before do + stub_env('GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT', 10) end + + it_behaves_like 'read large amount of keys' end - end - end - context 'when cache is Rails.cache' do - let(:cache) { Rails.cache } + context 'when GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT is larger than the default' do + let(:input_size) { 4000 } + let(:chunk_size) { 2000 } - context 'when reading using secondary store as default' do - before do - stub_feature_flags(use_primary_store_as_default_for_cache: false) + before do + stub_env('GITLAB_REDIS_CLUSTER_PIPELINE_BATCH_LIMIT', chunk_size) + end + + it_behaves_like 'read large 
amount of keys' end - it_behaves_like 'reading using cache stores' + it_behaves_like 'read large amount of keys' end + end + + context 'when cache is Rails.cache' do + let(:cache) { Rails.cache } it_behaves_like 'reading using cache stores' end @@ -97,7 +119,7 @@ RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, f context 'when deleting large amount of keys' do before do - 200.times { |i| cache.write(i, i) } + 2000.times { |i| cache.write(i, i) } end it 'calls pipeline multiple times' do @@ -113,9 +135,9 @@ RSpec.describe Gitlab::Patch::RedisCacheStore, :use_clean_rails_redis_caching, f expect( Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do - cache.delete_multi(Array(0..199)) + cache.delete_multi(Array(0..1999)) end - ).to eq(200) + ).to eq(2000) end end end diff --git a/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb b/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb new file mode 100644 index 00000000000..f57257cd1c0 --- /dev/null +++ b/spec/lib/gitlab/patch/sidekiq_scheduled_enq_spec.rb @@ -0,0 +1,89 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Patch::SidekiqScheduledEnq, :clean_gitlab_redis_queues, feature_category: :scalability do + describe '#enqueue_jobs' do + let_it_be(:payload) { {} } + + before do + allow(Sidekiq).to receive(:load_json).and_return(payload) + + # stub data in both namespaces + Sidekiq.redis { |c| c.zadd('schedule', 100, 'dummy') } + Gitlab::Redis::Queues.with { |c| c.zadd('schedule', 100, 'dummy') } + end + + subject { Sidekiq::Scheduled::Enq.new.enqueue_jobs } + + it 'polls both namespaces by default' do + expect(Sidekiq::Client).to receive(:push).with(payload).twice + + subject + + Sidekiq.redis do |conn| + expect(conn.zcard('schedule')).to eq(0) + end + + Gitlab::Redis::Queues.with do |conn| + expect(conn.zcard('schedule')).to eq(0) + end + end + + context 'when SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING is disabled' do + before do + stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'false') + end + + it 'polls via Sidekiq.redis only' do + expect(Sidekiq::Client).to receive(:push).with(payload).once + + subject + + Sidekiq.redis do |conn| + expect(conn.zcard('schedule')).to eq(0) + end + + Gitlab::Redis::Queues.with do |conn| + expect(conn.zcard('schedule')).to eq(1) + end + end + end + + context 'when both envvar are enabled' do + around do |example| + # runs the zadd to ensure it goes into namespaced set + Sidekiq.redis { |c| c.zadd('schedule', 100, 'dummy') } + + holder = Sidekiq.redis_pool + + # forcibly replace Sidekiq.redis since this is set in config/initializer/sidekiq.rb + Sidekiq.redis = Gitlab::Redis::Queues.pool + + example.run + + ensure + Sidekiq.redis = holder + end + + before do + stub_env('SIDEKIQ_ENQUEUE_NON_NAMESPACED', 'true') + stub_env('SIDEKIQ_ENABLE_DUAL_NAMESPACE_POLLING', 'true') + end + + it 'polls both sets' do + expect(Sidekiq::Client).to receive(:push).with(payload).twice + + subject + + Sidekiq.redis do |conn| + expect(conn.zcard('schedule')).to eq(0) + end + + Gitlab::Redis::Queues.with do |conn| + expect(conn.zcard('schedule')).to eq(0) + end + end + end + end +end diff --git a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb deleted file mode 100644 index 559557f9313..00000000000 --- a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb +++ /dev/null @@ -1,248 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 
Gitlab::Prometheus::AdditionalMetricsParser do - include Prometheus::MetricBuilders - - let(:parser_error_class) { Gitlab::Prometheus::ParsingError } - - describe '#load_groups_from_yaml' do - subject { described_class.load_groups_from_yaml('dummy.yaml') } - - describe 'parsing sample yaml' do - let(:sample_yaml) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: "title" - required_metrics: [ metric_a, metric_b ] - weight: 1 - queries: [{ query_range: 'query_range_a', label: label, unit: unit }] - - title: "title" - required_metrics: [metric_a] - weight: 1 - queries: [{ query_range: 'query_range_empty' }] - - group: group_b - priority: 1 - metrics: - - title: title - required_metrics: ['metric_a'] - weight: 1 - queries: [{query_range: query_range_a}] - EOF - end - - before do - allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(sample_yaml) } - end - - it 'parses to two metric groups with 2 and 1 metric respectively' do - expect(subject.count).to eq(2) - expect(subject[0].metrics.count).to eq(2) - expect(subject[1].metrics.count).to eq(1) - end - - it 'provide group data' do - expect(subject[0]).to have_attributes(name: 'group_a', priority: 1) - expect(subject[1]).to have_attributes(name: 'group_b', priority: 1) - end - - it 'provides metrics data' do - metrics = subject.flat_map(&:metrics) - - expect(metrics.count).to eq(3) - expect(metrics[0]).to have_attributes(title: 'title', required_metrics: %w(metric_a metric_b), weight: 1) - expect(metrics[1]).to have_attributes(title: 'title', required_metrics: %w(metric_a), weight: 1) - expect(metrics[2]).to have_attributes(title: 'title', required_metrics: %w{metric_a}, weight: 1) - end - - it 'provides query data' do - queries = subject.flat_map(&:metrics).flat_map(&:queries) - - expect(queries.count).to eq(3) - expect(queries[0]).to eq(query_range: 'query_range_a', label: 'label', unit: 'unit') - expect(queries[1]).to eq(query_range: 'query_range_empty') - expect(queries[2]).to eq(query_range: 'query_range_a') - end - end - - shared_examples 'required field' do |field_name| - context "when #{field_name} is nil" do - before do - allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_missing) } - end - - it 'throws parsing error' do - expect { subject }.to raise_error(parser_error_class, /#{field_name} can't be blank/i) - end - end - - context "when #{field_name} are not specified" do - before do - allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_nil) } - end - - it 'throws parsing error' do - expect { subject }.to raise_error(parser_error_class, /#{field_name} can't be blank/i) - end - end - end - - describe 'group required fields' do - it_behaves_like 'required field', 'metrics' do - let(:field_nil) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - EOF - end - - let(:field_missing) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - EOF - end - end - - it_behaves_like 'required field', 'name' do - let(:field_nil) do - <<-EOF.strip_heredoc - - group: - priority: 1 - metrics: [] - EOF - end - - let(:field_missing) do - <<-EOF.strip_heredoc - - priority: 1 - metrics: [] - EOF - end - end - - it_behaves_like 'required field', 'priority' do - let(:field_nil) do - <<-EOF.strip_heredoc - - group: group_a - priority: - metrics: [] - EOF - end - - let(:field_missing) do - <<-EOF.strip_heredoc - - group: group_a - metrics: [] - EOF - end - end - end - - describe 'metrics fields parsing' do - it_behaves_like 'required field', 'title' 
do - let(:field_nil) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: - required_metrics: [] - weight: 1 - queries: [] - EOF - end - - let(:field_missing) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - required_metrics: [] - weight: 1 - queries: [] - EOF - end - end - - it_behaves_like 'required field', 'required metrics' do - let(:field_nil) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: title - required_metrics: - weight: 1 - queries: [] - EOF - end - - let(:field_missing) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: title - weight: 1 - queries: [] - EOF - end - end - - it_behaves_like 'required field', 'weight' do - let(:field_nil) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: title - required_metrics: [] - weight: - queries: [] - EOF - end - - let(:field_missing) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: title - required_metrics: [] - queries: [] - EOF - end - end - - it_behaves_like 'required field', :queries do - let(:field_nil) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: title - required_metrics: [] - weight: 1 - queries: - EOF - end - - let(:field_missing) do - <<-EOF.strip_heredoc - - group: group_a - priority: 1 - metrics: - - title: title - required_metrics: [] - weight: 1 - EOF - end - end - end - end -end diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb deleted file mode 100644 index b2350eff9f9..00000000000 --- a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do - around do |example| - travel_to(Time.local(2008, 9, 1, 12, 0, 0)) { example.run } - end - - include_examples 'additional metrics query' do - let(:project) { create(:project, :repository) } - let(:deployment) { create(:deployment, environment: environment, project: project) } - let(:query_params) { [deployment.id] } - - it 'queries using specific time' do - expect(client).to receive(:query_range).with(anything, - start_time: (deployment.created_at - 30.minutes).to_f, - end_time: (deployment.created_at + 30.minutes).to_f) - - expect(query_result).not_to be_nil - end - end -end diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb deleted file mode 100644 index d0dee2ad366..00000000000 --- a/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Prometheus::Queries::AdditionalMetricsEnvironmentQuery do - around do |example| - freeze_time { example.run } - end - - include_examples 'additional metrics query' do - let(:query_params) { [environment.id] } - - it 'queries using specific time' do - expect(client).to receive(:query_range) - .with(anything, start_time: 8.hours.ago.to_f, end_time: Time.now.to_f) - expect(query_result).not_to be_nil - end - - context 'when start and end time parameters are provided' do - let(:query_params) { [environment.id, start_time, end_time] } - - context 'as unix timestamps' do - 
let(:start_time) { 4.hours.ago.to_f } - let(:end_time) { 2.hours.ago.to_f } - - it 'queries using the provided times' do - expect(client).to receive(:query_range) - .with(anything, start_time: start_time, end_time: end_time) - expect(query_result).not_to be_nil - end - end - - context 'as Date/Time objects' do - let(:start_time) { 4.hours.ago } - let(:end_time) { 2.hours.ago } - - it 'queries using the provided times converted to unix' do - expect(client).to receive(:query_range) - .with(anything, start_time: start_time.to_f, end_time: end_time.to_f) - expect(query_result).not_to be_nil - end - end - end - end -end diff --git a/spec/lib/gitlab/rack_attack/request_spec.rb b/spec/lib/gitlab/rack_attack/request_spec.rb index e8433d99d15..9d2144f75db 100644 --- a/spec/lib/gitlab/rack_attack/request_spec.rb +++ b/spec/lib/gitlab/rack_attack/request_spec.rb @@ -249,6 +249,39 @@ RSpec.describe Gitlab::RackAttack::Request do end end + describe '#get_request_protected_path?' do + subject { request.get_request_protected_path? } + + before do + stub_application_setting( + protected_paths_for_get_request: %w[/protected /secure]) + end + + where(:path, :expected) do + '/' | false + '/groups' | false + '/foo/protected' | false + '/foo/secure' | false + + '/protected' | true + '/secure' | true + '/secure/' | true + '/secure/foo' | true + end + + with_them do + it { is_expected.to eq(expected) } + + context 'when the application is mounted at a relative URL' do + before do + stub_config_setting(relative_url_root: '/gitlab/root') + end + + it { is_expected.to eq(expected) } + end + end + end + describe '#frontend_request?', :allow_forgery_protection do subject { request.send(:frontend_request?) } diff --git a/spec/lib/gitlab/redis/chat_spec.rb b/spec/lib/gitlab/redis/chat_spec.rb index 7a008580936..f9080b4409f 100644 --- a/spec/lib/gitlab/redis/chat_spec.rb +++ b/spec/lib/gitlab/redis/chat_spec.rb @@ -2,6 +2,6 @@ require 'spec_helper' -RSpec.describe Gitlab::Redis::Chat, feature_category: :no_category do # rubocop: disable RSpec/InvalidFeatureCategory +RSpec.describe Gitlab::Redis::Chat, feature_category: :ai_abstraction_layer do include_examples "redis_new_instance_shared_examples", 'chat', Gitlab::Redis::Cache end diff --git a/spec/lib/gitlab/redis/etag_cache_spec.rb b/spec/lib/gitlab/redis/etag_cache_spec.rb deleted file mode 100644 index 182a41bac80..00000000000 --- a/spec/lib/gitlab/redis/etag_cache_spec.rb +++ /dev/null @@ -1,56 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Redis::EtagCache, feature_category: :shared do - # Note: this is a pseudo-store in front of `Cache`, meant only as a tool - # to move away from `SharedState` for etag cache data. Thus, we use the - # same store configuration as the former. 
- let(:instance_specific_config_file) { "config/redis.cache.yml" } - - include_examples "redis_shared_examples" - - describe '#pool' do - let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } - let(:rails_root) { mktmpdir } - - subject { described_class.pool } - - before do - # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists - allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(rails_root) - allow(Gitlab::Redis::Cache).to receive(:rails_root).and_return(rails_root) - - allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host) - allow(Gitlab::Redis::Cache).to receive(:config_file_name).and_return(config_new_format_socket) - end - - around do |example| - clear_pool - example.run - ensure - clear_pool - end - - it 'instantiates an instance of MultiStore' do - subject.with do |redis_instance| - expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore) - - expect(redis_instance.primary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0") - expect(redis_instance.secondary_store.connection[:id]).to eq("redis://test-host:6379/99") - - expect(redis_instance.instance_name).to eq('EtagCache') - end - end - - it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_etag_cache, - :use_primary_store_as_default_for_etag_cache - end - - describe '#store_name' do - it 'returns the name of the Cache store' do - expect(described_class.store_name).to eq('Cache') - end - end -end diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb index e15375c88c7..ce21c2269cc 100644 --- a/spec/lib/gitlab/redis/multi_store_spec.rb +++ b/spec/lib/gitlab/redis/multi_store_spec.rb @@ -1130,4 +1130,104 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end end end + + # NOTE: for pub/sub, unit tests are favoured over integration tests to avoid long polling + # with threads which could lead to flaky specs. The multiplexing behaviour are verified in + # 'with WRITE redis commands' and 'with READ redis commands' contexts. 
+ context 'with pub/sub commands' do + let(:channel_name) { 'chanA' } + let(:message) { "msg" } + + shared_examples 'publishes to stores' do + it 'publishes to one or more stores' do + expect(stores).to all(receive(:publish)) + + multi_store.publish(channel_name, message) + end + end + + shared_examples 'subscribes and unsubscribes' do + it 'subscribes to the default store' do + expect(default_store).to receive(:subscribe) + expect(non_default_store).not_to receive(:subscribe) + + multi_store.subscribe(channel_name) + end + + it 'unsubscribes to the default store' do + expect(default_store).to receive(:unsubscribe) + expect(non_default_store).not_to receive(:unsubscribe) + + multi_store.unsubscribe + end + end + + context 'when using both stores' do + before do + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true) + end + + it_behaves_like 'publishes to stores' do + let(:stores) { [primary_store, secondary_store] } + end + + context 'with primary store set as default' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: true) + end + + it_behaves_like 'subscribes and unsubscribes' do + let(:default_store) { primary_store } + let(:non_default_store) { secondary_store } + end + end + + context 'with secondary store set as default' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: false) + end + + it_behaves_like 'subscribes and unsubscribes' do + let(:default_store) { secondary_store } + let(:non_default_store) { primary_store } + end + end + end + + context 'when only using the primary store' do + before do + stub_feature_flags( + use_primary_and_secondary_stores_for_test_store: false, + use_primary_store_as_default_for_test_store: true + ) + end + + it_behaves_like 'subscribes and unsubscribes' do + let(:default_store) { primary_store } + let(:non_default_store) { secondary_store } + end + + it_behaves_like 'publishes to stores' do + let(:stores) { [primary_store] } + end + end + + context 'when only using the secondary store' do + before do + stub_feature_flags( + use_primary_and_secondary_stores_for_test_store: false, + use_primary_store_as_default_for_test_store: false + ) + end + + it_behaves_like 'subscribes and unsubscribes' do + let(:default_store) { secondary_store } + let(:non_default_store) { primary_store } + end + + it_behaves_like 'publishes to stores' do + let(:stores) { [secondary_store] } + end + end + end end diff --git a/spec/lib/gitlab/redis/pubsub_spec.rb b/spec/lib/gitlab/redis/pubsub_spec.rb new file mode 100644 index 00000000000..e196d02116e --- /dev/null +++ b/spec/lib/gitlab/redis/pubsub_spec.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Redis::Pubsub, feature_category: :redis do + include_examples "redis_new_instance_shared_examples", 'pubsub', Gitlab::Redis::SharedState + include_examples "redis_shared_examples" +end diff --git a/spec/lib/gitlab/redis/queues_metadata_spec.rb b/spec/lib/gitlab/redis/queues_metadata_spec.rb new file mode 100644 index 00000000000..693e8074b45 --- /dev/null +++ b/spec/lib/gitlab/redis/queues_metadata_spec.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Redis::QueuesMetadata, feature_category: :redis do + include_examples "redis_new_instance_shared_examples", 'queues_metadata', Gitlab::Redis::Queues + include_examples "redis_shared_examples" + + describe '#pool' do + let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } 
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } + + subject { described_class.pool } + + around do |example| + clear_pool + example.run + ensure + clear_pool + end + + before do + allow(described_class).to receive(:config_file_name).and_return(config_new_format_host) + + allow(described_class).to receive(:config_file_name).and_return(config_new_format_host) + allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket) + end + + it 'instantiates an instance of MultiStore' do + subject.with do |redis_instance| + expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore) + + expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99") + expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0") + + expect(redis_instance.instance_name).to eq('QueuesMetadata') + end + end + + it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_queues_metadata, + :use_primary_store_as_default_for_queues_metadata + end +end diff --git a/spec/lib/gitlab/redis/workhorse_spec.rb b/spec/lib/gitlab/redis/workhorse_spec.rb new file mode 100644 index 00000000000..46931a6afcb --- /dev/null +++ b/spec/lib/gitlab/redis/workhorse_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Redis::Workhorse, feature_category: :scalability do + include_examples "redis_new_instance_shared_examples", 'workhorse', Gitlab::Redis::SharedState + include_examples "redis_shared_examples" + + describe '#pool' do + let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } + let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } + + subject { described_class.pool } + + before do + allow(described_class).to receive(:config_file_name).and_return(config_new_format_host) + + # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists + allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(mktmpdir) + allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket) + end + + around do |example| + clear_pool + example.run + ensure + clear_pool + end + + it 'instantiates an instance of MultiStore' do + subject.with do |redis_instance| + expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore) + + expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99") + expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0") + + expect(redis_instance.instance_name).to eq('Workhorse') + end + end + + it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_workhorse, + :use_primary_store_as_default_for_workhorse + end +end diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index df123ef638f..02ae3f63918 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -86,31 +86,6 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.to match('') } end - describe '.bulk_import_destination_namespace_path_regex_message' do - subject { described_class.bulk_import_destination_namespace_path_regex_message } - - it { - is_expected - .to eq("must have a relative path structure with no HTTP " \ - "protocol characters, or leading or trailing forward slashes. 
Path segments must not start or " \ - "end with a special character, and must not contain consecutive special characters." - ) - } - end - - describe '.bulk_import_source_full_path_regex_message' do - subject { described_class.bulk_import_source_full_path_regex_message } - - it { - is_expected - .to eq( - "must have a relative path structure with no HTTP " \ - "protocol characters, or leading or trailing forward slashes. Path segments must not start or " \ - "end with a special character, and must not contain consecutive special characters." - ) - } - end - describe '.group_path_regex' do subject { described_class.group_path_regex } diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb index 6cff0eff7e8..a4a7c139beb 100644 --- a/spec/lib/gitlab/repo_path_spec.rb +++ b/spec/lib/gitlab/repo_path_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe ::Gitlab::RepoPath do include Gitlab::Routing - let_it_be(:project) { create(:project, :repository) } + let_it_be(:project) { create(:project_with_design, :repository) } let_it_be(:personal_snippet) { create(:personal_snippet) } let_it_be(:project_snippet) { create(:project_snippet, project: project) } let_it_be(:redirect_route) { 'foo/bar/baz' } @@ -28,6 +28,10 @@ RSpec.describe ::Gitlab::RepoPath do it 'parses a project snippet repository path' do expect(described_class.parse("#{project.full_path}/snippets/#{project_snippet.id}")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, nil]) end + + it 'parses a full project design repository path' do + expect(described_class.parse(project.design_repository.full_path)).to match_array([project.design_management_repository, project, Gitlab::GlRepository::DESIGN, nil]) + end end context 'a relative path' do @@ -43,6 +47,10 @@ RSpec.describe ::Gitlab::RepoPath do expect(described_class.parse('/' + project.full_path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, nil]) end + it 'parses a relative design repository path' do + expect(described_class.parse(project.full_path + '.design.git')).to match_array([project.design_management_repository, project, Gitlab::GlRepository::DESIGN, nil]) + end + context 'of a redirected project' do it 'parses a relative repository path' do expect(described_class.parse(redirect.path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, redirect_route]) @@ -52,6 +60,10 @@ RSpec.describe ::Gitlab::RepoPath do expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, "#{redirect_route}.wiki"]) end + it 'parses a relative design repository path' do + expect(described_class.parse(redirect.path + '.design.git')).to match_array([project.design_management_repository, project, Gitlab::GlRepository::DESIGN, "#{redirect_route}.design"]) + end + it 'parses a relative path starting with /' do expect(described_class.parse('/' + redirect.path + '.git')).to eq([project, project, Gitlab::GlRepository::PROJECT, redirect_route]) end diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb index 725b7901e68..d1f19a5e1ba 100644 --- a/spec/lib/gitlab/search_results_spec.rb +++ b/spec/lib/gitlab/search_results_spec.rb @@ -16,8 +16,9 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do let(:query) { 'foo' } let(:filters) { {} } let(:sort) { nil } + let(:limit_projects) { Project.order(:id) } - subject(:results) { described_class.new(user, query, Project.order(:id), sort: sort, filters: filters) } + 
subject(:results) { described_class.new(user, query, limit_projects, sort: sort, filters: filters) } context 'as a user with access' do before do @@ -236,9 +237,14 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') } let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo open') } let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') } + let_it_be(:unarchived_project) { project } + let_it_be(:archived_project) { create(:project, :public, :archived) } + let_it_be(:unarchived_result) { create(:issue, project: unarchived_project, title: 'foo unarchived') } + let_it_be(:archived_result) { create(:issue, project: archived_project, title: 'foo archived') } include_examples 'search results filtered by state' include_examples 'search results filtered by confidential' + include_examples 'search results filtered by archived', 'search_issues_hide_archived_projects' end context 'ordering' do @@ -274,7 +280,7 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do let_it_be(:unarchived_result) { create(:project, :public, group: group, name: 'Test1') } let_it_be(:archived_result) { create(:project, :archived, :public, group: group, name: 'Test2') } - it_behaves_like 'search results filtered by archived', 'search_projects_hide_archived' + it_behaves_like 'search results filtered by archived' end end @@ -433,26 +439,32 @@ RSpec.describe Gitlab::SearchResults, feature_category: :global_search do end context 'milestones' do - it 'returns correct set of milestones' do - private_project_1 = create(:project, :private) - private_project_2 = create(:project, :private) - internal_project = create(:project, :internal) - public_project_1 = create(:project, :public) - public_project_2 = create(:project, :public, :issues_disabled, :merge_requests_disabled) + let_it_be(:archived_project) { create(:project, :public, :archived) } + let_it_be(:private_project_1) { create(:project, :private) } + let_it_be(:private_project_2) { create(:project, :private) } + let_it_be(:internal_project) { create(:project, :internal) } + let_it_be(:public_project_1) { create(:project, :public) } + let_it_be(:public_project_2) { create(:project, :public, :issues_disabled, :merge_requests_disabled) } + let_it_be(:hidden_milestone_1) { create(:milestone, project: private_project_2, title: 'Private project without access milestone') } + let_it_be(:hidden_milestone_2) { create(:milestone, project: public_project_2, title: 'Public project with milestones disabled milestone') } + let_it_be(:hidden_milestone_3) { create(:milestone, project: archived_project, title: 'Milestone from an archived project') } + let_it_be(:milestone_1) { create(:milestone, project: private_project_1, title: 'Private project with access milestone', state: 'closed') } + let_it_be(:milestone_2) { create(:milestone, project: internal_project, title: 'Internal project milestone') } + let_it_be(:milestone_3) { create(:milestone, project: public_project_1, title: 'Public project with milestones enabled milestone') } + let(:unarchived_result) { milestone_1 } + let(:archived_result) { hidden_milestone_3 } + let(:limit_projects) { ProjectsFinder.new(current_user: user).execute } + let(:query) { 'milestone' } + let(:scope) { 'milestones' } + + before do private_project_1.add_developer(user) - # milestones that should not be visible - create(:milestone, project: 
private_project_2, title: 'Private project without access milestone') - create(:milestone, project: public_project_2, title: 'Public project with milestones disabled milestone') - # milestones that should be visible - milestone_1 = create(:milestone, project: private_project_1, title: 'Private project with access milestone', state: 'closed') - milestone_2 = create(:milestone, project: internal_project, title: 'Internal project milestone') - milestone_3 = create(:milestone, project: public_project_1, title: 'Public project with milestones enabled milestone') - # Global search scope takes user authorized projects, internal projects and public projects. - limit_projects = ProjectsFinder.new(current_user: user).execute - - milestones = described_class.new(user, 'milestone', limit_projects).objects('milestones') - - expect(milestones).to match_array([milestone_1, milestone_2, milestone_3]) end + + it 'returns correct set of milestones' do + expect(results.objects(scope)).to match_array([milestone_1, milestone_2, milestone_3]) + end + + include_examples 'search results filtered by archived', 'search_milestones_hide_archived_projects' end end diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb index 774a362617a..faa8a206d74 100644 --- a/spec/lib/gitlab/security/scan_configuration_spec.rb +++ b/spec/lib/gitlab/security/scan_configuration_spec.rb @@ -57,6 +57,16 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do it { is_expected.to be_nil } end + describe '#on_demand_available?' do + subject { scan.on_demand_available? } + + let(:configured) { true } + let(:available) { true } + let(:type) { :sast } + + it { is_expected.to be_falsey } + end + describe '#can_enable_by_merge_request?' do subject { scan.can_enable_by_merge_request? 
} diff --git a/spec/lib/gitlab/setup_helper/workhorse_spec.rb b/spec/lib/gitlab/setup_helper/workhorse_spec.rb index 726b73a9dfe..e5a44abc731 100644 --- a/spec/lib/gitlab/setup_helper/workhorse_spec.rb +++ b/spec/lib/gitlab/setup_helper/workhorse_spec.rb @@ -24,8 +24,8 @@ RSpec.describe Gitlab::SetupHelper::Workhorse do end describe '.redis_url' do - it 'matches the SharedState URL' do - expect(Gitlab::Redis::SharedState).to receive(:url).and_return('foo') + it 'matches the Workhorse URL' do + expect(Gitlab::Redis::Workhorse).to receive(:url).and_return('foo') expect(described_class.redis_url).to eq('foo') end @@ -34,14 +34,14 @@ RSpec.describe Gitlab::SetupHelper::Workhorse do describe '.redis_db' do subject { described_class.redis_db } - it 'matches the SharedState DB' do - expect(Gitlab::Redis::SharedState).to receive(:params).and_return(db: 1) + it 'matches the Workhorse DB' do + expect(Gitlab::Redis::Workhorse).to receive(:params).and_return(db: 1) is_expected.to eq(1) end it 'defaults to 0 if unspecified' do - expect(Gitlab::Redis::SharedState).to receive(:params).and_return({}) + expect(Gitlab::Redis::Workhorse).to receive(:params).and_return({}) is_expected.to eq(0) end diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb index 14eb568b974..3ae1236cc7c 100644 --- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Client, :clean_gitlab_redis_queues, -:clean_gitlab_redis_shared_state do +:clean_gitlab_redis_queues_metadata do shared_context 'deduplication worker class' do |strategy, including_scheduled| let(:worker_class) do Class.new do diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb index c22e7a1240f..937a1751cc7 100644 --- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state, +RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, + :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state, :clean_gitlab_redis_queues_metadata, feature_category: :shared do using RSpec::Parameterized::TableSyntax @@ -78,11 +79,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end end - context 'with Redis cookies' do - def with_redis(&block) - Gitlab::Redis::Queues.with(&block) - end - + shared_examples 'with Redis cookies' do let(:cookie_key) { "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:#{idempotency_key}:cookie:v2" } let(:cookie) { get_redis_msgpack(cookie_key) } @@ -416,6 +413,62 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end end + context 'with multi-store feature flags turned on' do + def with_redis(&block) + Gitlab::Redis::QueuesMetadata.with(&block) + end + + shared_examples 'uses QueuesMetadata' do + it 'use Gitlab::Redis::QueuesMetadata.with' do + expect(Gitlab::Redis::QueuesMetadata).to receive(:with).and_call_original + expect(Gitlab::Redis::Queues).not_to receive(:with) + + duplicate_job.check! 
+ end + end + + context 'when migration is ongoing with double-write' do + before do + stub_feature_flags(use_primary_store_as_default_for_queues_metadata: false) + end + + it_behaves_like 'uses QueuesMetadata' + it_behaves_like 'with Redis cookies' + end + + context 'when migration is completed' do + before do + stub_feature_flags(use_primary_and_secondary_stores_for_queues_metadata: false) + end + + it_behaves_like 'uses QueuesMetadata' + it_behaves_like 'with Redis cookies' + end + + it_behaves_like 'uses QueuesMetadata' + it_behaves_like 'with Redis cookies' + end + + context 'when both multi-store feature flags are off' do + def with_redis(&block) + Gitlab::Redis::Queues.with(&block) + end + + before do + stub_feature_flags(use_primary_and_secondary_stores_for_queues_metadata: false) + stub_feature_flags(use_primary_store_as_default_for_queues_metadata: false) + end + + it 'use Gitlab::Redis::Queues' do + expect(Gitlab::Redis::Queues).to receive(:with).and_call_original + expect(Gitlab::Redis::QueuesMetadata).not_to receive(:with) + + duplicate_job.check! + end + + it_behaves_like 'with Redis cookies' + end + describe '#scheduled?' do it 'returns false for non-scheduled jobs' do expect(duplicate_job.scheduled?).to be(false) diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb index 0cbf9eab3d8..a27e723e392 100644 --- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb @@ -402,11 +402,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do include Sidekiq::Worker include WorkerAttributes - if category - feature_category category - else - feature_category :not_owned - end + feature_category category || :not_owned def perform; end end diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb index c66e36c5621..bf379d9cb0d 100644 --- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb +++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do +RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues, + :clean_gitlab_redis_queues_metadata do def clear_queues Sidekiq::Queue.new('authorized_projects').clear Sidekiq::Queue.new('post_receive').clear diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb index 93632848788..8ceba7ca4b7 100644 --- a/spec/lib/gitlab/sidekiq_queue_spec.rb +++ b/spec/lib/gitlab/sidekiq_queue_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do +RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues, :clean_gitlab_redis_queues_metadata do around do |example| Sidekiq::Queue.new('foobar').clear Sidekiq::Testing.disable!(&example) diff --git a/spec/lib/gitlab/sql/cte_spec.rb b/spec/lib/gitlab/sql/cte_spec.rb index 523380eae34..d2d3fdbb450 100644 --- a/spec/lib/gitlab/sql/cte_spec.rb +++ b/spec/lib/gitlab/sql/cte_spec.rb @@ -15,8 +15,7 @@ RSpec.describe Gitlab::SQL::CTE do expected = [ "#{name} AS ", - Gitlab::Database::AsWithMaterialized.materialized_if_supported, - (' ' unless Gitlab::Database::AsWithMaterialized.materialized_if_supported.blank?), + 'MATERIALIZED ', "(#{sql1})" ].join diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb index a34ddf8773c..7bd2ddf2889 100644 --- a/spec/lib/gitlab/sql/pattern_spec.rb +++ 
b/spec/lib/gitlab/sql/pattern_spec.rb @@ -9,36 +9,44 @@ RSpec.describe Gitlab::SQL::Pattern do let_it_be(:issue1) { create(:issue, title: 'noise foo noise', description: 'noise bar noise') } let_it_be(:issue2) { create(:issue, title: 'noise baz noise', description: 'noise foo noise') } let_it_be(:issue3) { create(:issue, title: 'Oh', description: 'Ah') } + let_it_be(:issue4) { create(:issue, title: 'beep beep', description: 'beep beep') } + let_it_be(:issue5) { create(:issue, title: 'beep', description: 'beep') } - subject(:fuzzy_search) { Issue.fuzzy_search(query, columns) } + subject(:fuzzy_search) { Issue.fuzzy_search(query, columns, exact_matches_first: exact_matches_first) } - where(:query, :columns, :expected) do - 'foo' | [Issue.arel_table[:title]] | %i[issue1] + where(:query, :columns, :exact_matches_first, :expected) do + 'foo' | [Issue.arel_table[:title]] | false | %i[issue1] - 'foo' | %i[title] | %i[issue1] - 'foo' | %w[title] | %i[issue1] - 'foo' | %i[description] | %i[issue2] - 'foo' | %i[title description] | %i[issue1 issue2] - 'bar' | %i[title description] | %i[issue1] - 'baz' | %i[title description] | %i[issue2] - 'qux' | %i[title description] | [] + 'foo' | %i[title] | false | %i[issue1] + 'foo' | %w[title] | false | %i[issue1] + 'foo' | %i[description] | false | %i[issue2] + 'foo' | %i[title description] | false | %i[issue1 issue2] + 'bar' | %i[title description] | false | %i[issue1] + 'baz' | %i[title description] | false | %i[issue2] + 'qux' | %i[title description] | false | [] - 'oh' | %i[title description] | %i[issue3] - 'OH' | %i[title description] | %i[issue3] - 'ah' | %i[title description] | %i[issue3] - 'AH' | %i[title description] | %i[issue3] - 'oh' | %i[title] | %i[issue3] - 'ah' | %i[description] | %i[issue3] + 'oh' | %i[title description] | false | %i[issue3] + 'OH' | %i[title description] | false | %i[issue3] + 'ah' | %i[title description] | false | %i[issue3] + 'AH' | %i[title description] | false | %i[issue3] + 'oh' | %i[title] | false | %i[issue3] + 'ah' | %i[description] | false | %i[issue3] - '' | %i[title] | %i[issue1 issue2 issue3] - %w[a b] | %i[title] | %i[issue1 issue2 issue3] + '' | %i[title] | false | %i[issue1 issue2 issue3 issue4 issue5] + %w[a b] | %i[title] | false | %i[issue1 issue2 issue3 issue4 issue5] + + 'beep' | %i[title] | true | %i[issue5 issue4] end with_them do let(:expected_issues) { expected.map { |sym| send(sym) } } it 'finds the expected issues' do - expect(fuzzy_search).to match_array(expected_issues) + if exact_matches_first + expect(fuzzy_search).to eq(expected_issues) + else + expect(fuzzy_search).to match_array(expected_issues) + end end end end diff --git a/spec/lib/gitlab/time_tracking_formatter_spec.rb b/spec/lib/gitlab/time_tracking_formatter_spec.rb index aa755d64a7a..b3372f676d4 100644 --- a/spec/lib/gitlab/time_tracking_formatter_spec.rb +++ b/spec/lib/gitlab/time_tracking_formatter_spec.rb @@ -28,6 +28,14 @@ RSpec.describe Gitlab::TimeTrackingFormatter, feature_category: :team_planning d end end + context 'when the duration is nil' do + let(:duration_string) { nil } + + it 'returns nil' do + expect(subject).to be_nil + end + end + context 'when the duration is zero' do let(:duration_string) { '0h' } diff --git a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb index 78a869b535a..5a5c7123971 100644 --- a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb +++ 
b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb @@ -63,6 +63,10 @@ RSpec.describe Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :do_not_s context 'when on gitlab.com environment' do let(:endpoint) { 'db-snowplow.trx.gitlab.net' } + before do + stub_application_setting(snowplow_database_collector_hostname: endpoint) + end + it 'sends event to tracker' do allow(Gitlab).to receive(:com?).and_return(true) allow(tracker).to receive(:track_struct_event).and_call_original diff --git a/spec/lib/gitlab/tracking/service_ping_context_spec.rb b/spec/lib/gitlab/tracking/service_ping_context_spec.rb index 7530650b902..3da9a588c0e 100644 --- a/spec/lib/gitlab/tracking/service_ping_context_spec.rb +++ b/spec/lib/gitlab/tracking/service_ping_context_spec.rb @@ -7,29 +7,27 @@ RSpec.describe Gitlab::Tracking::ServicePingContext do using RSpec::Parameterized::TableSyntax context 'with valid configuration' do - where(:data_source, :event, :key_path) do - :redis | nil | 'counts.some_metric' - :redis_hll | 'some_event' | nil + where(:data_source, :event) do + :redis | 'some_event' + :redis_hll | 'some_event' end with_them do it 'does not raise errors' do - expect { described_class.new(data_source: data_source, event: event, key_path: key_path) }.not_to raise_error + expect { described_class.new(data_source: data_source, event: event) }.not_to raise_error end end end context 'with invalid configuration' do - where(:data_source, :event, :key_path) do - :redis | nil | nil - :redis | 'some_event' | nil - :redis_hll | nil | nil - :redis_hll | nil | 'some key_path' - :random | 'some_event' | nil + where(:data_source, :event) do + :redis | nil + :redis_hll | nil + :random | 'some_event' end with_them do - subject(:new_instance) { described_class.new(data_source: data_source, event: event, key_path: key_path) } + subject(:new_instance) { described_class.new(data_source: data_source, event: event) } it 'does not raise errors' do expect { new_instance }.to raise_error(ArgumentError) @@ -48,10 +46,10 @@ RSpec.describe Gitlab::Tracking::ServicePingContext do end context 'for redis data source' do - let(:context_instance) { described_class.new(data_source: :redis, key_path: 'counts.sample_metric') } + let(:context_instance) { described_class.new(data_source: :redis, event: 'some_event') } it 'contains event_name' do - expect(context_instance.to_context.to_json.dig(:data, :key_path)).to eq('counts.sample_metric') + expect(context_instance.to_context.to_json.dig(:data, :event_name)).to eq('some_event') end end end diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb index c44cfdea1cd..4485a30ae66 100644 --- a/spec/lib/gitlab/tracking/standard_context_spec.rb +++ b/spec/lib/gitlab/tracking/standard_context_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Tracking::StandardContext do +RSpec.describe Gitlab::Tracking::StandardContext, feature_category: :service_ping do let(:snowplow_context) { subject.to_context } describe '#to_context' do @@ -76,6 +76,7 @@ RSpec.describe Gitlab::Tracking::StandardContext do it 'holds the correct values', :aggregate_failures do json_data = snowplow_context.to_json.fetch(:data) expect(json_data[:user_id]).to eq(user_id) + expect(json_data[:is_gitlab_team_member]).to eq(nil) expect(json_data[:project_id]).to eq(project_id) expect(json_data[:namespace_id]).to eq(namespace_id) expect(json_data[:plan]).to eq(plan_name) diff --git a/spec/lib/gitlab/url_builder_spec.rb 
b/spec/lib/gitlab/url_builder_spec.rb index 73627d3e6ff..865a8384405 100644 --- a/spec/lib/gitlab/url_builder_spec.rb +++ b/spec/lib/gitlab/url_builder_spec.rb @@ -32,6 +32,9 @@ RSpec.describe Gitlab::UrlBuilder do :ci_build | ->(build) { "/#{build.project.full_path}/-/jobs/#{build.id}" } :design | ->(design) { "/#{design.project.full_path}/-/design_management/designs/#{design.id}/raw_image" } + [:issue, :group_level] | ->(issue) { "/groups/#{issue.namespace.full_path}/-/work_items/#{issue.iid}" } + [:work_item, :group_level] | ->(work_item) { "/groups/#{work_item.namespace.full_path}/-/work_items/#{work_item.iid}" } + :group | ->(group) { "/groups/#{group.full_path}" } :group_milestone | ->(milestone) { "/groups/#{milestone.group.full_path}/-/milestones/#{milestone.iid}" } diff --git a/spec/lib/gitlab/url_sanitizer_spec.rb b/spec/lib/gitlab/url_sanitizer_spec.rb index 5f76c1de5b1..2c2ef8f13fb 100644 --- a/spec/lib/gitlab/url_sanitizer_spec.rb +++ b/spec/lib/gitlab/url_sanitizer_spec.rb @@ -91,6 +91,25 @@ RSpec.describe Gitlab::UrlSanitizer do end end + describe '.sanitize_masked_url' do + where(:original_url, :masked_url) do + 'http://{domain}.com' | 'http://{domain}.com' + 'http://{domain}/{hook}' | 'http://{domain}/{hook}' + 'http://user:pass@{domain}/hook' | 'http://*****:*****@{domain}/hook' + 'http://user:pass@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook' + 'http://user:@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook' + 'http://:pass@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook' + 'http://user@{domain}:{port}/hook' | 'http://*****:*****@{domain}:{port}/hook' + 'http://u:p@{domain}/hook?email=james@example.com' | 'http://*****:*****@{domain}/hook?email=james@example.com' + 'http://{domain}/hook?email=james@example.com' | 'http://{domain}/hook?email=james@example.com' + 'http://user:{pass}@example.com' | 'http://*****:*****@example.com' + end + + with_them do + it { expect(described_class.sanitize_masked_url(original_url)).to eq(masked_url) } + end + end + describe '#sanitized_url' do context 'credentials in hash' do where(username: ['foo', '', nil], password: ['bar', '', nil]) diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb index 859f3f7a8d7..6695736e54c 100644 --- a/spec/lib/gitlab/usage/metric_definition_spec.rb +++ b/spec/lib/gitlab/usage/metric_definition_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Usage::MetricDefinition do +RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping do let(:attributes) do { description: 'GitLab instance unique identifier', @@ -109,6 +109,42 @@ RSpec.describe Gitlab::Usage::MetricDefinition do end end + describe '#to_context' do + subject { definition.to_context } + + context 'with data_source redis_hll metric' do + before do + attributes[:data_source] = 'redis_hll' + attributes[:options] = { events: %w[some_event_1 some_event_2] } + end + + it 'returns a ServicePingContext with first event as event_name' do + expect(subject.to_h[:data][:event_name]).to eq('some_event_1') + end + end + + context 'with data_source redis metric' do + before do + attributes[:data_source] = 'redis' + attributes[:options] = { prefix: 'web_ide', event: 'views_count', include_usage_prefix: false } + end + + it 'returns a ServicePingContext with redis key as event_name' do + expect(subject.to_h[:data][:event_name]).to eq('WEB_IDE_VIEWS_COUNT') + end + end + + context 'with data_source database metric' do + 
before do + attributes[:data_source] = 'database' + end + + it 'returns nil' do + is_expected.to be_nil + end + end + end + describe '#validate' do using RSpec::Parameterized::TableSyntax @@ -117,7 +153,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do :value_type | nil :value_type | 'test' :status | nil - :milestone | nil + :milestone | 10.0 :data_category | nil :key_path | nil :product_group | nil @@ -233,26 +269,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do end end - describe 'statuses' do - using RSpec::Parameterized::TableSyntax - - where(:status, :skip_validation?) do - 'active' | false - 'broken' | false - 'removed' | true - end - - with_them do - subject(:validation) do - described_class.new(path, attributes.merge( { status: status } )).send(:skip_validation?) - end - - it 'returns true/false for skip_validation' do - expect(validation).to eq(skip_validation?) - end - end - end - describe '.load_all!' do let(:metric1) { Dir.mktmpdir('metric1') } let(:metric2) { Dir.mktmpdir('metric2') } diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb index e66dd04b69b..4544a3a60a1 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/batched_background_migration_failed_jobs_metric_spec.rb @@ -18,6 +18,20 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BatchedBackgroundMigrat ] end + let(:start) { 9.days.ago.to_fs(:db) } + let(:finish) { 2.days.ago.to_fs(:db) } + + let(:expected_query) do + "SELECT \"batched_background_migrations\".\"table_name\", \"batched_background_migrations\".\"job_class_name\", " \ + "COUNT(batched_jobs) AS number_of_failed_jobs " \ + "FROM \"batched_background_migrations\" " \ + "INNER JOIN \"batched_background_migration_jobs\" \"batched_jobs\" " \ + "ON \"batched_jobs\".\"batched_background_migration_id\" = \"batched_background_migrations\".\"id\" " \ + "WHERE \"batched_jobs\".\"status\" = 2 " \ + "AND \"batched_background_migrations\".\"created_at\" BETWEEN '#{start}' AND '#{finish}' " \ + "GROUP BY \"batched_background_migrations\".\"table_name\", \"batched_background_migrations\".\"job_class_name\"" + end + let_it_be(:active_migration) do create(:batched_background_migration, :active, table_name: 'users', job_class_name: 'test', created_at: 5.days.ago) end @@ -36,5 +50,5 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BatchedBackgroundMigrat let_it_be(:old_batched_job) { create(:batched_background_migration_job, :failed, batched_migration: old_migration) } - it_behaves_like 'a correct instrumented metric value', { time_frame: '7d' } + it_behaves_like 'a correct instrumented metric value and query', { time_frame: '7d' } end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb new file mode 100644 index 00000000000..208d5c259ca --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_connected_agents_metric_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountConnectedAgentsMetric, feature_category: :service_ping do + let_it_be(:agent_token_connected) { create(:cluster_agent_token, :active, last_used_at: 
2.minutes.ago) } + let_it_be(:agent_token_disconnected) { create(:cluster_agent_token) } + + let(:expected_value) { 1 } + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' } +end diff --git a/spec/lib/gitlab/usage/metrics/query_spec.rb b/spec/lib/gitlab/usage/metrics/query_spec.rb index 355d619f768..750d340551a 100644 --- a/spec/lib/gitlab/usage/metrics/query_spec.rb +++ b/spec/lib/gitlab/usage/metrics/query_spec.rb @@ -77,7 +77,7 @@ RSpec.describe Gitlab::Usage::Metrics::Query do it 'returns the histogram sql' do expect(described_class.for(:histogram, AlertManagement::HttpIntegration.active, :project_id, buckets: 1..2, bucket_size: 101)) - .to match(/^WITH "count_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/) + .to match(/^WITH "count_cte" AS MATERIALIZED/) end end diff --git a/spec/lib/gitlab/usage/time_series_storable_spec.rb b/spec/lib/gitlab/usage/time_series_storable_spec.rb new file mode 100644 index 00000000000..420a87c5483 --- /dev/null +++ b/spec/lib/gitlab/usage/time_series_storable_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::TimeSeriesStorable, feature_category: :service_ping do + let(:counter_class) do + Class.new do + include Gitlab::Usage::TimeSeriesStorable + + def redis_key(event, date) + key = apply_time_aggregation(event, date) + "#{key}:" + end + end + end + + let(:counter_instance) { counter_class.new } + + describe '#apply_time_aggregation' do + let(:key) { "key3" } + let(:time) { Date.new(2023, 5, 1) } + + it 'returns proper key for given time' do + expect(counter_instance.apply_time_aggregation(key, time)).to eq("key3-2023-18") + end + end + + describe '#keys_for_aggregation' do + let(:result) { counter_instance.keys_for_aggregation(**params) } + let(:params) { base_params } + let(:base_params) { { events: events, start_date: start_date, end_date: end_date } } + let(:events) { %w[event1 event2] } + let(:start_date) { Date.new(2023, 4, 1) } + let(:end_date) { Date.new(2023, 4, 15) } + + it 'returns proper keys' do + expect(result).to match_array(["event1-2023-13:", "event1-2023-14:", "event2-2023-13:", "event2-2023-14:"]) + end + end +end diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb index 11c6ea2fc9d..eeef9406841 100644 --- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do +RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter, feature_category: :pipeline_composition do describe '.track_unique_project_event' do using RSpec::Parameterized::TableSyntax include SnowplowHelpers diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb index 50e20e4fbcf..21a820deaa4 100644 --- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb @@ -3,304 +3,244 @@ require 'spec_helper' RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_gitlab_redis_shared_state do - let_it_be(:user1) { build(:user, id: 1) } + let_it_be(:user) { build(:user, id: 1) } let_it_be(:user2) { build(:user, 
id: 2) } let_it_be(:user3) { build(:user, id: 3) } let_it_be(:project) { create(:project) } - let_it_be(:category) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CATEGORY } - let_it_be(:event_action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_ACTION } - let_it_be(:event_label) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL } - let(:original_params) { nil } - let(:event_property) { action } let(:time) { Time.zone.now } + let(:namespace) { project&.namespace } context 'for Issue title edit actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_TITLE_CHANGED } - def track_action(params) - described_class.track_issue_title_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_title_changed_action(author: user, project: project) } end end context 'for Issue description edit actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED } - def track_action(params) - described_class.track_issue_description_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_description_changed_action(author: user, project: project) } end end context 'for Issue assignee edit actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED } - def track_action(params) - described_class.track_issue_assignee_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_assignee_changed_action(author: user, project: project) } end end context 'for Issue make confidential actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL } - def track_action(params) - described_class.track_issue_made_confidential_action(**params) - end + subject(:track_event) { described_class.track_issue_made_confidential_action(author: user, project: project) } end end context 'for Issue make visible actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_MADE_VISIBLE } - def track_action(params) - described_class.track_issue_made_visible_action(**params) - end + subject(:track_event) { described_class.track_issue_made_visible_action(author: user, project: project) } end end context 'for Issue created actions' do - it_behaves_like 'tracked issuable internal event with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_CREATED } - let(:original_params) { { namespace: project.project_namespace.reload } } + let(:project) { nil } - def track_action(params) - described_class.track_issue_created_action(**params) - end + subject(:track_event) { described_class.track_issue_created_action(author: user, namespace: namespace) } end end context 'for Issue closed actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_CLOSED } - def track_action(params) - 
described_class.track_issue_closed_action(**params) - end + subject(:track_event) { described_class.track_issue_closed_action(author: user, project: project) } end end context 'for Issue reopened actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_REOPENED } - def track_action(params) - described_class.track_issue_reopened_action(**params) - end + subject(:track_event) { described_class.track_issue_reopened_action(author: user, project: project) } end end context 'for Issue label changed actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_LABEL_CHANGED } - def track_action(params) - described_class.track_issue_label_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_label_changed_action(author: user, project: project) } end end context 'for Issue label milestone actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_MILESTONE_CHANGED } - def track_action(params) - described_class.track_issue_milestone_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_milestone_changed_action(author: user, project: project) } end end context 'for Issue cross-referenced actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_CROSS_REFERENCED } - def track_action(params) - described_class.track_issue_cross_referenced_action(**params) - end + subject(:track_event) { described_class.track_issue_cross_referenced_action(author: user, project: project) } end end context 'for Issue moved actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_MOVED } - def track_action(params) - described_class.track_issue_moved_action(**params) - end + subject(:track_event) { described_class.track_issue_moved_action(author: user, project: project) } end end context 'for Issue cloned actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do - let_it_be(:action) { described_class::ISSUE_CLONED } + it_behaves_like 'internal event tracking' do + let(:action) { described_class::ISSUE_CLONED } - def track_action(params) - described_class.track_issue_cloned_action(**params) - end + subject(:track_event) { described_class.track_issue_cloned_action(author: user, project: project) } end end context 'for Issue relate actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_RELATED } - def track_action(params) - described_class.track_issue_related_action(**params) - end + subject(:track_event) { described_class.track_issue_related_action(author: user, project: project) } end end context 'for Issue unrelate actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_UNRELATED } - def track_action(params) - described_class.track_issue_unrelated_action(**params) 
- end + subject(:track_event) { described_class.track_issue_unrelated_action(author: user, project: project) } end end context 'for Issue marked as duplicate actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_MARKED_AS_DUPLICATE } - def track_action(params) - described_class.track_issue_marked_as_duplicate_action(**params) - end + subject(:track_event) { described_class.track_issue_marked_as_duplicate_action(author: user, project: project) } end end context 'for Issue locked actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_LOCKED } - def track_action(params) - described_class.track_issue_locked_action(**params) - end + subject(:track_event) { described_class.track_issue_locked_action(author: user, project: project) } end end context 'for Issue unlocked actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_UNLOCKED } - def track_action(params) - described_class.track_issue_unlocked_action(**params) - end + subject(:track_event) { described_class.track_issue_unlocked_action(author: user, project: project) } end end context 'for Issue designs added actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_DESIGNS_ADDED } - def track_action(params) - described_class.track_issue_designs_added_action(**params) - end + subject(:track_event) { described_class.track_issue_designs_added_action(author: user, project: project) } end end context 'for Issue designs modified actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_DESIGNS_MODIFIED } - def track_action(params) - described_class.track_issue_designs_modified_action(**params) - end + subject(:track_event) { described_class.track_issue_designs_modified_action(author: user, project: project) } end end context 'for Issue designs removed actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_DESIGNS_REMOVED } - def track_action(params) - described_class.track_issue_designs_removed_action(**params) - end + subject(:track_event) { described_class.track_issue_designs_removed_action(author: user, project: project) } end end context 'for Issue due date changed actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_DUE_DATE_CHANGED } - def track_action(params) - described_class.track_issue_due_date_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_due_date_changed_action(author: user, project: project) } end end context 'for Issue time estimate changed actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_TIME_ESTIMATE_CHANGED } - def track_action(params) - 
described_class.track_issue_time_estimate_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_time_estimate_changed_action(author: user, project: project) } end end context 'for Issue time spent changed actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_TIME_SPENT_CHANGED } - def track_action(params) - described_class.track_issue_time_spent_changed_action(**params) - end + subject(:track_event) { described_class.track_issue_time_spent_changed_action(author: user, project: project) } end end context 'for Issue comment added actions', :snowplow do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_COMMENT_ADDED } - def track_action(params) - described_class.track_issue_comment_added_action(**params) - end + subject(:track_event) { described_class.track_issue_comment_added_action(author: user, project: project) } end end context 'for Issue comment edited actions', :snowplow do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_COMMENT_EDITED } - def track_action(params) - described_class.track_issue_comment_edited_action(**params) - end + subject(:track_event) { described_class.track_issue_comment_edited_action(author: user, project: project) } end end context 'for Issue comment removed actions', :snowplow do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_COMMENT_REMOVED } - def track_action(params) - described_class.track_issue_comment_removed_action(**params) - end + subject(:track_event) { described_class.track_issue_comment_removed_action(author: user, project: project) } end end context 'for Issue design comment removed actions' do - it_behaves_like 'tracked issuable snowplow and service ping events with project' do + it_behaves_like 'internal event tracking' do let(:action) { described_class::ISSUE_DESIGN_COMMENT_REMOVED } - def track_action(params) - described_class.track_issue_design_comment_removed_action(**params) - end + subject(:track_event) { described_class.track_issue_design_comment_removed_action(author: user, project: project) } end end it 'can return the count of actions per user deduplicated' do travel_to(Date.today.beginning_of_week) do # because events aggregated by week we need to emit events in the same week - described_class.track_issue_title_changed_action(author: user1, project: project) - described_class.track_issue_description_changed_action(author: user1, project: project) - described_class.track_issue_assignee_changed_action(author: user1, project: project) + described_class.track_issue_title_changed_action(author: user, project: project) + described_class.track_issue_description_changed_action(author: user, project: project) + described_class.track_issue_assignee_changed_action(author: user, project: project) end travel_to(Date.today.beginning_of_week + 2.days) do diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb index 9562f1c5500..1ea2ea144df 100644 --- a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb +++ 
b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb @@ -15,7 +15,8 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do 'k8s_api_proxy_request' => 2, 'flux_git_push_notifications_total' => 3, 'k8s_api_proxy_requests_via_ci_access' => 4, - 'k8s_api_proxy_requests_via_user_access' => 5 + 'k8s_api_proxy_requests_via_user_access' => 5, + 'k8s_api_proxy_requests_via_pat_access' => 6 } end @@ -31,7 +32,8 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do kubernetes_agent_k8s_api_proxy_request: 6, kubernetes_agent_flux_git_push_notifications_total: 9, kubernetes_agent_k8s_api_proxy_requests_via_ci_access: 12, - kubernetes_agent_k8s_api_proxy_requests_via_user_access: 15 + kubernetes_agent_k8s_api_proxy_requests_via_user_access: 15, + kubernetes_agent_k8s_api_proxy_requests_via_pat_access: 18 ) end diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb index 30588324adf..3ec7bf33623 100644 --- a/spec/lib/gitlab/usage_data_queries_spec.rb +++ b/spec/lib/gitlab/usage_data_queries_spec.rb @@ -82,7 +82,7 @@ RSpec.describe Gitlab::UsageDataQueries do it 'returns the histogram sql' do expect(described_class.histogram(AlertManagement::HttpIntegration.active, :project_id, buckets: 1..2, bucket_size: 101)) - .to match(/^WITH "count_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/) + .to match(/^WITH "count_cte" AS MATERIALIZED/) end end diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index 94c4544f754..143d0484392 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -397,7 +397,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic user = create(:user) project = create(:project, creator: user) issue = create(:issue, project: project, author: user) - create(:issue, project: project, author: User.support_bot) + create(:issue, project: project, author: Users::Internal.support_bot) create(:note, project: project, noteable: issue, author: user) create(:todo, project: project, target: issue, author: user) create(:jira_integration, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user)) @@ -431,7 +431,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic user = create(:user) project = create(:project, creator: user) create(:issue, project: project, author: user) - create(:issue, project: project, author: User.support_bot) + create(:issue, project: project, author: Users::Internal.support_bot) end expect(described_class.usage_activity_by_stage_plan({})).to include(issues: 3) @@ -556,7 +556,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic expect(count_data[:issues_using_zoom_quick_actions]).to eq(3) expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2) expect(count_data[:incident_issues]).to eq(4) - expect(count_data[:issues_created_from_alerts]).to eq(3) + expect(count_data[:issues_created_from_alerts]).to eq(2) expect(count_data[:alert_bot_incident_issues]).to eq(4) expect(count_data[:clusters_enabled]).to eq(6) expect(count_data[:project_clusters_enabled]).to eq(4) @@ -883,7 +883,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic let(:project) { create(:project, :service_desk_enabled) } it 'gathers Service Desk data' do - create_list(:issue, 2, :confidential, author: User.support_bot, project: project) + 
create_list(:issue, 2, :confidential, author: Users::Internal.support_bot, project: project) expect(subject).to eq(service_desk_enabled_projects: 1, service_desk_issues: 2) diff --git a/spec/lib/gitlab/user_access_snippet_spec.rb b/spec/lib/gitlab/user_access_snippet_spec.rb index 916e920e2ac..fd71a6ce0a5 100644 --- a/spec/lib/gitlab/user_access_snippet_spec.rb +++ b/spec/lib/gitlab/user_access_snippet_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Gitlab::UserAccessSnippet do let_it_be(:project) { create(:project, :private) } let_it_be(:snippet) { create(:project_snippet, :private, project: project) } - let_it_be(:migration_bot) { User.migration_bot } + let_it_be(:migration_bot) { Users::Internal.migration_bot } let(:user) { create(:user) } diff --git a/spec/lib/gitlab/utils/markdown_spec.rb b/spec/lib/gitlab/utils/markdown_spec.rb index 45953c7906e..d707cf51712 100644 --- a/spec/lib/gitlab/utils/markdown_spec.rb +++ b/spec/lib/gitlab/utils/markdown_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Gitlab::Utils::Markdown do +RSpec.describe Gitlab::Utils::Markdown, feature_category: :gitlab_docs do let(:klass) do Class.new do include Gitlab::Utils::Markdown @@ -53,25 +53,30 @@ RSpec.describe Gitlab::Utils::Markdown do end context 'when string has a product suffix' do - %w[CORE STARTER PREMIUM ULTIMATE FREE BRONZE SILVER GOLD].each do |tier| - ['', ' ONLY', ' SELF', ' SAAS'].each do |modifier| - context "#{tier}#{modifier}" do - let(:string) { "My Header (#{tier}#{modifier})" } - - it 'ignores a product suffix' do - is_expected.to eq 'my-header' - end - - context 'with "*" around a product suffix' do - let(:string) { "My Header **(#{tier}#{modifier})**" } - - it 'ignores a product suffix' do - is_expected.to eq 'my-header' + %w[PREMIUM ULTIMATE FREE].each do |tier| + [' ALL', ' SELF', ' SAAS'].each do |modifier| + ['', ' BETA', ' EXPERIMENT'].each do |status| + context "#{tier}#{modifier}#{status}" do + context 'with "*" around a product suffix' do + let(:string) { "My Header **(#{tier}#{modifier}#{status})**" } + + it 'ignores a product suffix' do + is_expected.to eq 'my-header' + end end end end end end + %w[BETA EXPERIMENT].each do |status| + context 'with "*" around a product suffix' do + let(:string) { "My Header **(#{status})**" } + + it 'ignores a product suffix' do + is_expected.to eq 'my-header' + end + end + end end context 'when string is empty' do diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index a1c2f7d667f..9bc1ebaebcb 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Workhorse do +RSpec.describe Gitlab::Workhorse, feature_category: :shared do let_it_be(:project) { create(:project, :repository) } let(:features) { { 'gitaly-feature-enforce-requests-limits' => 'true' } } @@ -365,19 +365,72 @@ RSpec.describe Gitlab::Workhorse do end end + describe '.cleanup_key' do + let(:key) { 'test-key' } + let(:value) { 'test-value' } + + subject(:cleanup_key) { described_class.cleanup_key(key) } + + shared_examples 'cleans up key' do |redis = Gitlab::Redis::Workhorse| + before do + described_class.set_key_and_notify(key, value) + end + + it 'deletes the key' do + expect { cleanup_key } + .to change { redis.with { |c| c.exists?(key) } }.from(true).to(false) + end + end + + it_behaves_like 'cleans up key' + + context 'when workhorse migration feature flags are disabled' do + before do + stub_feature_flags( + 
use_primary_and_secondary_stores_for_workhorse: false, + use_primary_store_as_default_for_workhorse: false + ) + end + + it_behaves_like 'cleans up key', Gitlab::Redis::SharedState + end + + context 'when only one of the workhorse migration feature flags is enabled' do + context 'when use_primary_and_secondary_stores_for_workhorse is enabled' do + before do + stub_feature_flags( + use_primary_store_as_default_for_workhorse: false + ) + end + + it_behaves_like 'cleans up key' + end + + context 'when use_primary_store_as_default_for_workhorse is enabled' do + before do + stub_feature_flags( + use_primary_and_secondary_stores_for_workhorse: false + ) + end + + it_behaves_like 'cleans up key' + end + end + end + + describe '.set_key_and_notify' do + let(:key) { 'test-key' } + let(:value) { 'test-value' } + + subject { described_class.set_key_and_notify(key, value, overwrite: overwrite) } + - shared_examples 'set and notify' do + shared_examples 'set and notify' do |redis = Gitlab::Redis::Workhorse| it 'set and return the same value' do is_expected.to eq(value) end it 'set and notify' do - expect(Gitlab::Redis::SharedState).to receive(:with).and_call_original + expect(redis).to receive(:with).and_call_original expect_any_instance_of(::Redis).to receive(:publish) .with(described_class::NOTIFICATION_PREFIX + 'test-key', "test-value") @@ -389,6 +442,39 @@ RSpec.describe Gitlab::Workhorse do let(:overwrite) { true } it_behaves_like 'set and notify' + + context 'when workhorse migration feature flags are disabled' do + before do + stub_feature_flags( + use_primary_and_secondary_stores_for_workhorse: false, + use_primary_store_as_default_for_workhorse: false + ) + end + + it_behaves_like 'set and notify', Gitlab::Redis::SharedState + end + + context 'when only one of the workhorse migration feature flags is enabled' do + context 'when use_primary_and_secondary_stores_for_workhorse is enabled' do + before do + stub_feature_flags( + use_primary_store_as_default_for_workhorse: false + ) + end + + it_behaves_like 'set and notify' + end + + context 'when use_primary_store_as_default_for_workhorse is enabled' do + before do + stub_feature_flags( + use_primary_and_secondary_stores_for_workhorse: false + ) + end + + it_behaves_like 'set and notify' + end + end end context 'when we set an existing key' do @@ -519,18 +605,53 @@ RSpec.describe Gitlab::Workhorse do describe '.send_dependency' do let(:headers) { { Accept: 'foo', Authorization: 'Bearer asdf1234' } } let(:url) { 'https://foo.bar.com/baz' } + let(:upload_method) { nil } + let(:upload_url) { nil } + let(:upload_headers) { {} } + let(:upload_config) { { method: upload_method, headers: upload_headers, url: upload_url }.compact_blank! } - subject { described_class.send_dependency(headers, url) } + subject { described_class.send_dependency(headers, url, upload_config: upload_config) } - it 'sets the header correctly', :aggregate_failures do - key, command, params = decode_workhorse_header(subject) + shared_examples 'setting the header correctly' do |ensure_upload_config_field: nil| + it 'sets the header correctly' do + key, command, params = decode_workhorse_header(subject) + expected_params = { + 'Headers' => headers.transform_values { |v| Array.wrap(v) }, + 'Url' => url, + 'UploadConfig' => { + 'Method' => upload_method, + 'Url' => upload_url, + 'Headers' => upload_headers.transform_values { |v| Array.wrap(v) } + }.compact_blank! + } + expected_params.compact_blank! 
- expect(key).to eq("Gitlab-Workhorse-Send-Data") - expect(command).to eq("send-dependency") - expect(params).to eq({ - 'Header' => headers, - 'Url' => url - }.deep_stringify_keys) + expect(key).to eq("Gitlab-Workhorse-Send-Data") + expect(command).to eq("send-dependency") + expect(params).to eq(expected_params.deep_stringify_keys) + + expect(params.dig('UploadConfig', ensure_upload_config_field)).to be_present if ensure_upload_config_field + end + end + + it_behaves_like 'setting the header correctly' + + context 'overriding the method' do + let(:upload_method) { 'PUT' } + + it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'Method' + end + + context 'overriding the upload url' do + let(:upload_url) { 'https://test.dev' } + + it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'Url' + end + + context 'with upload headers set' do + let(:upload_headers) { { 'Private-Token' => '1234567890' } } + + it_behaves_like 'setting the header correctly', ensure_upload_config_field: 'Headers' end end diff --git a/spec/lib/gitlab/x509/certificate_spec.rb b/spec/lib/gitlab/x509/certificate_spec.rb index d919b99de2a..a81bdfcbd42 100644 --- a/spec/lib/gitlab/x509/certificate_spec.rb +++ b/spec/lib/gitlab/x509/certificate_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::X509::Certificate do +RSpec.describe Gitlab::X509::Certificate, feature_category: :source_code_management do include SmimeHelper let(:sample_ca_certs_path) { Rails.root.join('spec/fixtures/clusters').to_s } diff --git a/spec/lib/gitlab/x509/commit_sigstore_spec.rb b/spec/lib/gitlab/x509/commit_sigstore_spec.rb new file mode 100644 index 00000000000..7079fa28108 --- /dev/null +++ b/spec/lib/gitlab/x509/commit_sigstore_spec.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::X509::Commit, feature_category: :source_code_management do + let(:commit_sha) { '440bf5b2b499a90d9adcbebe3752f8c6f245a1aa' } + let_it_be(:user) { create(:user, email: X509Helpers::User2.certificate_email) } + let_it_be(:project) { create(:project, :repository, path: X509Helpers::User2.path, creator: user) } + let(:commit) { create(:commit, project: project) } + let(:signature) { described_class.new(commit).signature } + let(:store) { OpenSSL::X509::Store.new } + let(:certificate) { OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert) } + + before do + store.add_cert(certificate) if certificate + allow(OpenSSL::X509::Store).to receive(:new).and_return(store) + end + + describe '#signature' do + context 'on second call' do + it 'returns the cached signature' do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:new).and_call_original + end + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:create_cached_signature!).and_call_original + end + + signature + + # consecutive call + expect(described_class).not_to receive(:create_cached_signature!).and_call_original + signature + end + end + end + + describe '#update_signature!' 
do + let(:certificate) { nil } + + it 'updates verification status' do + signature + + cert = OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert) + store.add_cert(cert) + + # stored_signature = CommitSignatures::X509CommitSignature.find_by_commit_sha(commit_sha) + # expect { described_class.new(commit).update_signature!(stored_signature) }.to( + # change { signature.reload.verification_status }.from('unverified').to('verified') + # ) # TODO sigstore support pending + end + end +end diff --git a/spec/lib/gitlab/x509/commit_spec.rb b/spec/lib/gitlab/x509/commit_spec.rb index 412fa6e5a7f..2766a1a9bac 100644 --- a/spec/lib/gitlab/x509/commit_spec.rb +++ b/spec/lib/gitlab/x509/commit_spec.rb @@ -1,10 +1,10 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Gitlab::X509::Commit do +RSpec.describe Gitlab::X509::Commit, feature_category: :source_code_management do let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' } - let(:user) { create(:user, email: X509Helpers::User1.certificate_email) } - let(:project) { create(:project, :repository, path: X509Helpers::User1.path, creator: user) } + let_it_be(:user) { create(:user, email: X509Helpers::User1.certificate_email) } + let_it_be(:project) { create(:project, :repository, path: X509Helpers::User1.path, creator: user) } let(:commit) { project.commit_by(oid: commit_sha ) } let(:signature) { described_class.new(commit).signature } let(:store) { OpenSSL::X509::Store.new } diff --git a/spec/lib/gitlab/x509/signature_sigstore_spec.rb b/spec/lib/gitlab/x509/signature_sigstore_spec.rb new file mode 100644 index 00000000000..84962576ea2 --- /dev/null +++ b/spec/lib/gitlab/x509/signature_sigstore_spec.rb @@ -0,0 +1,453 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::X509::Signature, feature_category: :source_code_management do + let(:issuer_attributes) do + { + subject_key_identifier: X509Helpers::User2.issuer_subject_key_identifier, + subject: X509Helpers::User2.certificate_issuer + } + end + + it_behaves_like 'signature with type checking', :x509 do + subject(:signature) do + described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + end + end + + shared_examples "a verified signature" do + let!(:user) { create(:user, email: X509Helpers::User2.certificate_email) } + + subject(:signature) do + described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + end + + it 'returns a verified signature if email does match' do + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey # TODO sigstore support pending + expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending + end + + it 'returns a verified signature if email does match, case-insensitively' do + signature = described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + X509Helpers::User2.certificate_email.upcase, + X509Helpers::User2.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to 
have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey # TODO sigstore support pending + expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending + end + + context 'when the certificate contains multiple emails' do + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:get_certificate_extension).and_call_original + allow(instance).to receive(:get_certificate_extension) + .with('subjectAltName') + .and_return("email:gitlab2@example.com, othername:, email:#{ + X509Helpers::User2.certificate_email + }") + end + end + + context 'and the email matches one of them' do + it 'returns a verified signature' do + expect(signature.x509_certificate).to have_attributes(certificate_attributes.except(:email, :emails)) + expect(signature.x509_certificate.email).to eq('gitlab2@example.com') + expect(signature.x509_certificate.emails).to contain_exactly('gitlab2@example.com', + X509Helpers::User2.certificate_email) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey # TODO sigstore support pending + expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending + end + end + end + + context "if the email matches but isn't confirmed" do + let!(:user) { create(:user, :unconfirmed, email: X509Helpers::User2.certificate_email) } + + it "returns an unverified signature" do + expect(signature.verification_status).to eq(:unverified) + end + end + + it 'returns an unverified signature if email does not match' do + signature = described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + "gitlab@example.com", + X509Helpers::User2.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey # TODO sigstore support pending + expect(signature.verification_status).to eq(:unverified) + end + + it 'returns an unverified signature if email does match and time is wrong' do + signature = described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + X509Helpers::User2.certificate_email, + Time.zone.local(2020, 2, 22) + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + + it 'returns an unverified signature if certificate is revoked' do + expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending + + signature.x509_certificate.revoked! 
+ + expect(signature.verification_status).to eq(:unverified) + end + end + + context 'with commit signature' do + let(:certificate_attributes) do + { + subject_key_identifier: X509Helpers::User2.certificate_subject_key_identifier, + subject: X509Helpers::User2.certificate_subject, + email: X509Helpers::User2.certificate_email, + emails: [X509Helpers::User2.certificate_email], + serial_number: X509Helpers::User2.certificate_serial + } + end + + context 'with verified signature' do + context 'with trusted certificate store' do + before do + store = OpenSSL::X509::Store.new + certificate = OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert) + store.add_cert(certificate) + allow(OpenSSL::X509::Store).to receive(:new).and_return(store) + end + + it_behaves_like "a verified signature" + end + + context 'with the certificate defined by OpenSSL::X509::DEFAULT_CERT_FILE' do + before do + store = OpenSSL::X509::Store.new + certificate = OpenSSL::X509::Certificate.new(X509Helpers::User2.trust_cert) + file_path = Rails.root.join("tmp/cert.pem").to_s + + File.open(file_path, "wb") do |f| + f.print certificate.to_pem + end + + allow(Gitlab::X509::Certificate).to receive(:default_cert_file).and_return(file_path) + + allow(OpenSSL::X509::Store).to receive(:new).and_return(store) + end + + it_behaves_like "a verified signature" + end + + context 'without trusted certificate within store' do + before do + store = OpenSSL::X509::Store.new + allow(OpenSSL::X509::Store).to receive(:new) + .and_return( + store + ) + end + + it 'returns an unverified signature' do + signature = described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + end + + context 'with invalid signature' do + it 'returns nil' do + signature = described_class.new( + X509Helpers::User2.signed_commit_signature.tr('A', 'B'), + X509Helpers::User2.signed_commit_base_data, + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + expect(signature.x509_certificate).to be_nil + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + + context 'with invalid commit message' do + it 'returns nil' do + signature = described_class.new( + X509Helpers::User2.signed_commit_signature, + 'x', + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + expect(signature.x509_certificate).to be_nil + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + end + + context 'with email' do + describe 'subjectAltName with email, othername' do + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:get_certificate_extension).and_call_original + allow(instance).to receive(:get_certificate_extension) + .with('subjectAltName') + .and_return("email:gitlab@example.com, othername:") + end + end + + let(:signature) do + described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + 'gitlab@example.com', + X509Helpers::User2.signed_commit_time + ) + end + + it 
'extracts email' do + expect(signature.x509_certificate.email).to eq("gitlab@example.com") + expect(signature.x509_certificate.emails).to contain_exactly("gitlab@example.com") + end + + context 'when there are multiple emails' do + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:get_certificate_extension).and_call_original + allow(instance).to receive(:get_certificate_extension) + .with('subjectAltName') + .and_return("email:gitlab@example.com, othername:, email:gitlab2@example.com") + end + end + + it 'extracts all the emails' do + expect(signature.x509_certificate.email).to eq("gitlab@example.com") + expect(signature.x509_certificate.emails).to contain_exactly("gitlab@example.com", "gitlab2@example.com") + end + end + end + + describe 'subjectAltName with othername, email' do + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:get_certificate_extension).and_call_original + end + + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:get_certificate_extension).and_call_original + allow(instance).to receive(:get_certificate_extension) + .with('subjectAltName') + .and_return("othername:, email:gitlab@example.com") + end + end + + it 'extracts email' do + signature = described_class.new( + X509Helpers::User2.signed_commit_signature, + X509Helpers::User2.signed_commit_base_data, + 'gitlab@example.com', + X509Helpers::User2.signed_commit_time + ) + + expect(signature.x509_certificate.email).to eq("gitlab@example.com") + end + end + end + + describe '#signed_by_user' do + subject do + described_class.new( + X509Helpers::User2.signed_tag_signature, + X509Helpers::User2.signed_tag_base_data, + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ).signed_by_user + end + + context 'if email is assigned to a user' do + let!(:signed_by_user) { create(:user, email: X509Helpers::User2.certificate_email) } + + it 'returns user' do + is_expected.to eq(signed_by_user) + end + end + + it 'returns nil if email is not assigned to a user' do + is_expected.to be_nil + end + end + + context 'with tag signature' do + let(:certificate_attributes) do + { + subject_key_identifier: X509Helpers::User2.tag_certificate_subject_key_identifier, + subject: X509Helpers::User2.certificate_subject, + email: X509Helpers::User2.certificate_email, + emails: [X509Helpers::User2.certificate_email], + serial_number: X509Helpers::User2.tag_certificate_serial + } + end + + let(:issuer_attributes) do + { + subject_key_identifier: X509Helpers::User2.tag_issuer_subject_key_identifier, + subject: X509Helpers::User2.tag_certificate_issuer + } + end + + context 'with verified signature' do + let_it_be(:user) { create(:user, :unconfirmed, email: X509Helpers::User2.certificate_email) } + + subject(:signature) do + described_class.new( + X509Helpers::User2.signed_tag_signature, + X509Helpers::User2.signed_tag_base_data, + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + end + + context 'with trusted certificate store' do + before do + store = OpenSSL::X509::Store.new + certificate = OpenSSL::X509::Certificate.new X509Helpers::User2.trust_cert + store.add_cert(certificate) + allow(OpenSSL::X509::Store).to receive(:new).and_return(store) + end + + context 'when user email is confirmed' do + before_all do + user.confirm + end + + it 'returns a verified signature if email does match', :aggregate_failures do + expect(signature.x509_certificate).to 
have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey # TODO sigstore support pending + expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending + end + + it 'returns an unverified signature if email does not match', :aggregate_failures do + signature = described_class.new( + X509Helpers::User2.signed_tag_signature, + X509Helpers::User2.signed_tag_base_data, + "gitlab@example.com", + X509Helpers::User2.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey # TODO sigstore support pending + expect(signature.verification_status).to eq(:unverified) + end + + it 'returns an unverified signature if email does match and time is wrong', :aggregate_failures do + signature = described_class.new( + X509Helpers::User2.signed_tag_signature, + X509Helpers::User2.signed_tag_base_data, + X509Helpers::User2.certificate_email, + Time.zone.local(2020, 2, 22) + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + + it 'returns an unverified signature if certificate is revoked' do + expect(signature.verification_status).to eq(:unverified) # TODO sigstore support pending + + signature.x509_certificate.revoked! + + expect(signature.verification_status).to eq(:unverified) + end + end + + it 'returns an unverified signature if the email matches but is not confirmed' do + expect(signature.verification_status).to eq(:unverified) + end + end + + context 'without trusted certificate within store' do + before do + store = OpenSSL::X509::Store.new + allow(OpenSSL::X509::Store).to receive(:new) + .and_return( + store + ) + end + + it 'returns an unverified signature' do + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + end + + context 'with invalid signature' do + it 'returns nil' do + signature = described_class.new( + X509Helpers::User2.signed_tag_signature.tr('A', 'B'), + X509Helpers::User2.signed_tag_base_data, + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + expect(signature.x509_certificate).to be_nil + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + + context 'with invalid message' do + it 'returns nil' do + signature = described_class.new( + X509Helpers::User2.signed_tag_signature, + 'x', + X509Helpers::User2.certificate_email, + X509Helpers::User2.signed_commit_time + ) + expect(signature.x509_certificate).to be_nil + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end + end + end +end diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb index e0823aa8153..8043cefe888 100644 --- a/spec/lib/gitlab/x509/signature_spec.rb +++ b/spec/lib/gitlab/x509/signature_spec.rb @@ -2,7 +2,7 @@ require 
'spec_helper' -RSpec.describe Gitlab::X509::Signature do +RSpec.describe Gitlab::X509::Signature, feature_category: :source_code_management do let(:issuer_attributes) do { subject_key_identifier: X509Helpers::User1.issuer_subject_key_identifier, diff --git a/spec/lib/gitlab/x509/tag_sigstore_spec.rb b/spec/lib/gitlab/x509/tag_sigstore_spec.rb new file mode 100644 index 00000000000..3cf864ea442 --- /dev/null +++ b/spec/lib/gitlab/x509/tag_sigstore_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::X509::Tag, feature_category: :source_code_management do + describe '#signature' do + let(:tag_id) { 'v1.1.2' } + let(:tag) { instance_double('Gitlab::Git::Tag') } + let_it_be(:user) { create(:user, email: X509Helpers::User2.tag_email) } + let_it_be(:project) { create(:project, path: X509Helpers::User2.path, creator: user) } + let(:signature) { described_class.new(project.repository, tag).signature } + + before do + allow(tag).to receive(:id).and_return(tag_id) + allow(tag).to receive(:has_signature?).and_return(true) + allow(tag).to receive(:user_email).and_return(user.email) + allow(tag).to receive(:date).and_return(X509Helpers::User2.signed_tag_time) + allow(Gitlab::Git::Tag).to receive(:extract_signature_lazily).with(project.repository, tag_id) + .and_return([X509Helpers::User2.signed_tag_signature, X509Helpers::User2.signed_tag_base_data]) + end + + describe 'signed tag' do + let(:certificate_attributes) do + { + subject_key_identifier: X509Helpers::User2.tag_certificate_subject_key_identifier, + subject: X509Helpers::User2.certificate_subject, + email: X509Helpers::User2.certificate_email, + serial_number: X509Helpers::User2.tag_certificate_serial + } + end + + let(:issuer_attributes) do + { + subject_key_identifier: X509Helpers::User2.tag_issuer_subject_key_identifier, + subject: X509Helpers::User2.tag_certificate_issuer + } + end + + it { expect(signature).not_to be_nil } + it { expect(signature.verification_status).to eq(:unverified) } + it { expect(signature.x509_certificate).to have_attributes(certificate_attributes) } + it { expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) } + end + end +end diff --git a/spec/lib/gitlab/x509/tag_spec.rb b/spec/lib/gitlab/x509/tag_spec.rb index e20ef688db5..4368c3d7a4b 100644 --- a/spec/lib/gitlab/x509/tag_spec.rb +++ b/spec/lib/gitlab/x509/tag_spec.rb @@ -1,15 +1,24 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Gitlab::X509::Tag do - subject(:signature) { described_class.new(project.repository, tag).signature } - +RSpec.describe Gitlab::X509::Tag, feature_category: :source_code_management do describe '#signature' do - let_it_be(:project) { create(:project, :repository) } - let_it_be(:repository) { project.repository.raw } + let(:tag_id) { 'v1.1.1' } + let(:tag) { instance_double('Gitlab::Git::Tag') } + let_it_be(:user) { create(:user, email: X509Helpers::User1.tag_email) } + let_it_be(:project) { create(:project, path: X509Helpers::User1.path, creator: user) } + let(:signature) { described_class.new(project.repository, tag).signature } + + before do + allow(tag).to receive(:id).and_return(tag_id) + allow(tag).to receive(:has_signature?).and_return(true) + allow(tag).to receive(:user_email).and_return(user.email) + allow(tag).to receive(:date).and_return(X509Helpers::User1.signed_tag_time) + allow(Gitlab::Git::Tag).to receive(:extract_signature_lazily).with(project.repository, tag_id) + .and_return([X509Helpers::User1.signed_tag_signature, 
X509Helpers::User1.signed_tag_base_data]) + end describe 'signed tag' do - let(:tag) { project.repository.find_tag('v1.1.1') } let(:certificate_attributes) do { subject_key_identifier: X509Helpers::User1.tag_certificate_subject_key_identifier, @@ -32,11 +41,5 @@ RSpec.describe Gitlab::X509::Tag do it { expect(signature.x509_certificate).to have_attributes(certificate_attributes) } it { expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) } end - - describe 'unsigned tag' do - let(:tag) { project.repository.find_tag('v1.0.0') } - - it { expect(signature).to be_nil } - end end end diff --git a/spec/lib/peek/views/click_house_spec.rb b/spec/lib/peek/views/click_house_spec.rb index 9d7d06204fc..1ff49afd728 100644 --- a/spec/lib/peek/views/click_house_spec.rb +++ b/spec/lib/peek/views/click_house_spec.rb @@ -16,9 +16,15 @@ RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_ca data = ClickHouse::Client.select('SELECT 1 AS value', :main) ClickHouse::Client.execute('INSERT INTO events (id) VALUES (1)', :main) + Tempfile.open(['test', '.csv.gz']) do |f| + File.binwrite(f.path, ActiveSupport::Gzip.compress("id\n10\n20")) + + ClickHouse::Client.insert_csv('INSERT INTO events (id) FORMAT CSV', File.open(f.path), :main) + end + expect(data).to eq([{ 'value' => 1 }]) - expect(results[:calls]).to eq(2) + expect(results[:calls]).to eq(3) expect(results[:duration]).to be_kind_of(String) expect(results[:details]).to match_array([ @@ -30,6 +36,11 @@ RSpec.describe Peek::Views::ClickHouse, :click_house, :request_store, feature_ca sql: 'INSERT INTO events (id) VALUES (1)', database: 'database: main', statistics: include('written_rows=>"1"') + }), + a_hash_including({ + sql: 'INSERT INTO events (id) FORMAT CSV', + database: 'database: main', + statistics: include('written_rows=>"2"') }) ]) end diff --git a/spec/lib/sidebars/admin/panel_spec.rb b/spec/lib/sidebars/admin/panel_spec.rb index 9c362f527f5..83ad867050c 100644 --- a/spec/lib/sidebars/admin/panel_spec.rb +++ b/spec/lib/sidebars/admin/panel_spec.rb @@ -18,14 +18,10 @@ RSpec.describe Sidebars::Admin::Panel, feature_category: :navigation do describe '#super_sidebar_context_header' do it 'returns a hash with the correct title and icon' do - expected_header = { - title: panel.aria_label, - icon: 'admin' - } - - expect(panel.super_sidebar_context_header).to eq(expected_header) + expect(panel.super_sidebar_context_header).to eq(_('Admin Area')) end end it_behaves_like 'a panel with uniquely identifiable menu items' + it_behaves_like 'a panel instantiable by the anonymous user' end diff --git a/spec/lib/sidebars/concerns/has_avatar_spec.rb b/spec/lib/sidebars/concerns/has_avatar_spec.rb new file mode 100644 index 00000000000..bc9038c216e --- /dev/null +++ b/spec/lib/sidebars/concerns/has_avatar_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Sidebars::Concerns::HasAvatar, feature_category: :navigation do + subject do + Class.new do + include Sidebars::Concerns::HasAvatar + end.new + end + + describe '#avatar' do + it 'returns nil' do + expect(subject.avatar).to be_nil + end + end + + describe '#avatar_shape' do + it 'returns rect' do + expect(subject.avatar_shape).to eq('rect') + end + end + + describe '#entity_id' do + it 'returns nil' do + expect(subject.entity_id).to be_nil + end + end +end diff --git a/spec/lib/sidebars/explore/panel_spec.rb b/spec/lib/sidebars/explore/panel_spec.rb new file mode 100644 index 00000000000..b3030dfe2e4 --- 
/dev/null +++ b/spec/lib/sidebars/explore/panel_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Explore::Panel, feature_category: :navigation do + let(:user) { build_stubbed(:user) } + + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + subject { described_class.new(context) } + + it_behaves_like 'a panel with uniquely identifiable menu items' + + it 'implements #super_sidebar_context_header' do + expect(subject.super_sidebar_context_header).to eq(_('Explore')) + end +end diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb index 382ee07e458..713e22e2e76 100644 --- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb +++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb @@ -24,29 +24,16 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu, feature_category expect(menu.render?).to eq true end end - - context 'when menu does not have any menu item to show' do - it 'returns false' do - stub_feature_flags(harbor_registry_integration: false) - stub_container_registry_config(enabled: false) - stub_config(packages: { enabled: false }) - stub_config(dependency_proxy: { enabled: false }) - - expect(menu.render?).to eq false - end - end end describe '#link' do let(:registry_enabled) { true } let(:packages_enabled) { true } - let(:harbor_registry_integration) { true } before do stub_container_registry_config(enabled: registry_enabled) stub_config(packages: { enabled: packages_enabled }) stub_config(dependency_proxy: { enabled: true }) - stub_feature_flags(harbor_registry_integration: harbor_registry_integration) end subject { menu.link } @@ -70,14 +57,6 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu, feature_category it 'menu link points to Harbor Registry page' do expect(subject).to eq find_menu(menu, :harbor_registry).link end - - context 'when Harbor Registry is not visible' do - let(:harbor_registry_integration) { false } - - it 'menu link points to Dependency Proxy page' do - expect(subject).to eq find_menu(menu, :dependency_proxy).link - end - end end end end @@ -194,29 +173,13 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu, feature_category describe 'Harbor Registry' do let(:item_id) { :harbor_registry } - before do - stub_feature_flags(harbor_registry_integration: harbor_registry_enabled) - end - - context 'when config harbor registry setting is disabled' do - let(:harbor_registry_enabled) { false } - - it_behaves_like 'the menu entry is not available' - end - - context 'when config harbor registry setting is enabled' do - let(:harbor_registry_enabled) { true } - - it_behaves_like 'the menu entry is available' - end + it_behaves_like 'the menu entry is available' context 'when config harbor registry setting is not activated' do before do harbor_integration.update!(active: false) end - let(:harbor_registry_enabled) { true } - it_behaves_like 'the menu entry is not available' end end diff --git a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb index d3aceaf422b..2cce2d28e68 100644 --- a/spec/lib/sidebars/groups/menus/scope_menu_spec.rb +++ b/spec/lib/sidebars/groups/menus/scope_menu_spec.rb @@ -17,9 +17,10 @@ RSpec.describe Sidebars::Groups::Menus::ScopeMenu, feature_category: :navigation it_behaves_like 'serializable as super_sidebar_menu_args' do let(:extra_attrs) do { - sprite_icon: 
'group', super_sidebar_parent: ::Sidebars::StaticMenu, - title: _('Group overview'), + title: group.name, + avatar: group.avatar_url, + entity_id: group.id, item_id: :group_overview } end diff --git a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb index 52c3a35a9d7..c939dd870c4 100644 --- a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb +++ b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb @@ -20,12 +20,7 @@ RSpec.describe Sidebars::Groups::SuperSidebarPanel, feature_category: :navigatio subject { described_class.new(context) } it 'implements #super_sidebar_context_header' do - expect(subject.super_sidebar_context_header).to eq( - { - title: group.name, - avatar: group.avatar_url, - id: group.id - }) + expect(subject.super_sidebar_context_header).to eq(_('Group')) end describe '#renderable_menus' do @@ -53,4 +48,5 @@ RSpec.describe Sidebars::Groups::SuperSidebarPanel, feature_category: :navigatio it_behaves_like 'a panel with uniquely identifiable menu items' it_behaves_like 'a panel with all menu_items categorized' + it_behaves_like 'a panel instantiable by the anonymous user' end diff --git a/spec/lib/sidebars/menu_item_spec.rb b/spec/lib/sidebars/menu_item_spec.rb index 3ff5b80e5d9..7f67b5a2e8d 100644 --- a/spec/lib/sidebars/menu_item_spec.rb +++ b/spec/lib/sidebars/menu_item_spec.rb @@ -5,7 +5,8 @@ require 'fast_spec_helper' RSpec.describe Sidebars::MenuItem, feature_category: :navigation do let(:title) { 'foo' } let(:html_options) { {} } - let(:menu_item) { described_class.new(title: title, active_routes: {}, link: '', container_html_options: html_options) } + let(:extra) { {} } + let(:menu_item) { described_class.new(title: title, active_routes: {}, link: '', container_html_options: html_options, **extra) } it 'includes by default aria-label attribute set to the title' do expect(menu_item.container_html_options).to eq({ aria: { label: title } }) @@ -21,11 +22,17 @@ RSpec.describe Sidebars::MenuItem, feature_category: :navigation do describe "#serialize_for_super_sidebar" do let(:html_options) { { class: 'custom-class' } } + let(:extra) { { avatar: '/avatar.png', entity_id: 123 } } subject { menu_item.serialize_for_super_sidebar } it 'includes custom CSS classes' do expect(subject[:link_classes]).to be('custom-class') end + + it 'includes avatar data' do + expect(subject[:avatar]).to be('/avatar.png') + expect(subject[:entity_id]).to be(123) + end end end diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb index 00202ac7d2b..e59a8cd2163 100644 --- a/spec/lib/sidebars/menu_spec.rb +++ b/spec/lib/sidebars/menu_spec.rb @@ -33,6 +33,8 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do item_id: 'id1', title: 'Is active', link: 'foo2', + avatar: '/avatar.png', + entity_id: 123, active_routes: { controller: 'fooc' } )) menu.add_item(Sidebars::MenuItem.new( @@ -51,6 +53,9 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do { title: "Title", icon: nil, + avatar: nil, + avatar_shape: 'rect', + entity_id: nil, link: "foo2", is_active: true, pill_count: nil, @@ -60,6 +65,8 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do id: 'id1', title: "Is active", icon: nil, + avatar: '/avatar.png', + entity_id: 123, link: "foo2", is_active: true, pill_count: nil, @@ -69,6 +76,8 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do id: 'id2', title: "Not active", icon: nil, + avatar: nil, + entity_id: nil, link: "foo3", is_active: false, pill_count: 10, 
@@ -85,6 +94,9 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do { title: "Title", icon: nil, + avatar: nil, + avatar_shape: 'rect', + entity_id: nil, link: nil, is_active: false, pill_count: 'foo', diff --git a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb index bc03787e95f..999889a72ee 100644 --- a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb +++ b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb @@ -11,8 +11,8 @@ RSpec.describe Sidebars::Organizations::Menus::ScopeMenu, feature_category: :nav let(:menu) { described_class.new(context) } let(:extra_attrs) do { - title: s_('Organization|Organization overview'), - sprite_icon: 'organization', + avatar: nil, + entity_id: organization.id, super_sidebar_parent: ::Sidebars::StaticMenu, item_id: :organization_overview } diff --git a/spec/lib/sidebars/organizations/panel_spec.rb b/spec/lib/sidebars/organizations/panel_spec.rb index 1f0b8d72aef..edaa676aa41 100644 --- a/spec/lib/sidebars/organizations/panel_spec.rb +++ b/spec/lib/sidebars/organizations/panel_spec.rb @@ -14,4 +14,5 @@ RSpec.describe Sidebars::Organizations::Panel, feature_category: :navigation do end it_behaves_like 'a panel with uniquely identifiable menu items' + it_behaves_like 'a panel instantiable by the anonymous user' end diff --git a/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb b/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb index 99b33a5edf8..b8ceda615c4 100644 --- a/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb +++ b/spec/lib/sidebars/organizations/super_sidebar_panel_spec.rb @@ -15,11 +15,7 @@ RSpec.describe Sidebars::Organizations::SuperSidebarPanel, feature_category: :na subject { described_class.new(context) } it 'implements #super_sidebar_context_header' do - expect(subject.super_sidebar_context_header).to eq( - { - title: organization.name, - id: organization.id - }) + expect(subject.super_sidebar_context_header).to eq(s_('Organization|Organization')) end describe '#renderable_menus' do @@ -36,4 +32,5 @@ RSpec.describe Sidebars::Organizations::SuperSidebarPanel, feature_category: :na end it_behaves_like 'a panel with uniquely identifiable menu items' + it_behaves_like 'a panel instantiable by the anonymous user' end diff --git a/spec/lib/sidebars/panel_spec.rb b/spec/lib/sidebars/panel_spec.rb index 857cb1139b5..e4b3b973484 100644 --- a/spec/lib/sidebars/panel_spec.rb +++ b/spec/lib/sidebars/panel_spec.rb @@ -46,17 +46,25 @@ RSpec.describe Sidebars::Panel, feature_category: :navigation do end end - describe '#has_renderable_menus?' do - it 'returns false when no renderable menus' do - expect(panel.has_renderable_menus?).to be false + describe '#render?' 
do + it 'returns false with no menus' do + expect(panel.render?).to be false end - it 'returns true when no renderable menus' do + it 'returns false with no renderable menus' do + allow(menu1).to receive(:render?).and_return(false) + + panel.add_menu(menu1) + + expect(panel.render?).to be false + end + + it 'returns true with renderable menus' do allow(menu1).to receive(:render?).and_return(true) panel.add_menu(menu1) - expect(panel.has_renderable_menus?).to be true + expect(panel.render?).to be true end end diff --git a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb index 53d92d013a9..91913e5b733 100644 --- a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb @@ -14,6 +14,7 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu, feature_category: :navigat let(:extra_attrs) do { item_id: :project_issue_list, + active_routes: { path: %w[projects/issues#index projects/issues#show projects/issues#new] }, pill_count: menu.pill_count, has_pill: menu.has_pill?, super_sidebar_parent: Sidebars::Projects::SuperSidebarMenus::PlanMenu diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb index c0787aa9db5..f1df56823b1 100644 --- a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb @@ -88,19 +88,5 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu, feature_category: :naviga it_behaves_like 'access rights checks' end - - describe 'Tracing' do - let(:item_id) { :tracing } - - specify { is_expected.not_to be_nil } - - describe 'when feature is disabled' do - before do - stub_feature_flags(observability_tracing: false) - end - - specify { is_expected.to be_nil } - end - end end end diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb index b917208bac1..0cf95391a26 100644 --- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb @@ -39,7 +39,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego before do stub_container_registry_config(enabled: registry_enabled) stub_config(packages: { enabled: packages_enabled }) - stub_feature_flags(harbor_registry_integration: false, ml_experiment_tracking: false) + stub_feature_flags(ml_experiment_tracking: false) end context 'when Packages Registry is visible' do @@ -58,8 +58,8 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego context 'when Container Registry is not visible' do let(:registry_enabled) { false } - it 'does not display menu link' do - expect(subject.render?).to eq false + it 'displays menu link' do + expect(subject.render?).to eq true end end end @@ -155,26 +155,13 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego describe 'Harbor Registry' do let(:item_id) { :harbor_registry } - context 'when config harbor registry setting is disabled' do - it 'does not add the menu item to the list' do - stub_feature_flags(harbor_registry_integration: false) - - is_expected.to be_nil - end - end - - context 'when config harbor registry setting is enabled' do - it 'the menu item is added to list of menu items' do - stub_feature_flags(harbor_registry_integration: true) - - is_expected.not_to be_nil - 
expect(subject.active_routes[:controller]).to eq('projects/harbor/repositories') - end + it 'the menu item is added to list of menu items' do + is_expected.not_to be_nil + expect(subject.active_routes[:controller]).to eq('projects/harbor/repositories') end context 'when config harbor registry setting is not activated' do it 'does not add the menu item to the list' do - stub_feature_flags(harbor_registry_integration: true) project.harbor_integration.update!(active: false) is_expected.to be_nil diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb index 45464278880..1c2d159950a 100644 --- a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb @@ -11,8 +11,9 @@ RSpec.describe Sidebars::Projects::Menus::ScopeMenu, feature_category: :navigati let(:menu) { described_class.new(context) } let(:extra_attrs) do { - title: _('Project overview'), - sprite_icon: 'project', + title: project.name, + avatar: project.avatar_url, + entity_id: project.id, super_sidebar_parent: ::Sidebars::StaticMenu, item_id: :project_overview } diff --git a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb index 3fc6cd5083f..dc264c1c14f 100644 --- a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb +++ b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb @@ -31,12 +31,7 @@ RSpec.describe Sidebars::Projects::SuperSidebarPanel, feature_category: :navigat end it 'implements #super_sidebar_context_header' do - expect(subject.super_sidebar_context_header).to eq( - { - title: project.name, - avatar: project.avatar_url, - id: project.id - }) + expect(subject.super_sidebar_context_header).to eq(_('Project')) end describe '#renderable_menus' do @@ -64,4 +59,5 @@ RSpec.describe Sidebars::Projects::SuperSidebarPanel, feature_category: :navigat it_behaves_like 'a panel with uniquely identifiable menu items' it_behaves_like 'a panel with all menu_items categorized' + it_behaves_like 'a panel instantiable by the anonymous user' end diff --git a/spec/lib/sidebars/search/panel_spec.rb b/spec/lib/sidebars/search/panel_spec.rb index 39c0f112793..fa1b4266a2f 100644 --- a/spec/lib/sidebars/search/panel_spec.rb +++ b/spec/lib/sidebars/search/panel_spec.rb @@ -12,6 +12,7 @@ RSpec.describe Sidebars::Search::Panel, feature_category: :navigation do subject { described_class.new(context) } it_behaves_like 'a panel with uniquely identifiable menu items' + it_behaves_like 'a panel instantiable by the anonymous user' describe '#aria_label' do it 'returns the correct aria label' do @@ -21,11 +22,7 @@ RSpec.describe Sidebars::Search::Panel, feature_category: :navigation do describe '#super_sidebar_context_header' do it 'returns a hash with the correct title and icon' do - expected_header = { - title: 'Search results', - icon: 'search-results' - } - expect(panel.super_sidebar_context_header).to eq(expected_header) + expect(panel.super_sidebar_context_header).to eq(_('Search results')) end end end diff --git a/spec/lib/sidebars/static_menu_spec.rb b/spec/lib/sidebars/static_menu_spec.rb index 3d9feee0494..fda953c0791 100644 --- a/spec/lib/sidebars/static_menu_spec.rb +++ b/spec/lib/sidebars/static_menu_spec.rb @@ -23,6 +23,8 @@ RSpec.describe Sidebars::StaticMenu, feature_category: :navigation do id: 'id1', title: "Is active", icon: nil, + avatar: nil, + entity_id: nil, link: "foo2", is_active: true, pill_count: nil, @@ -32,6 +34,8 @@ RSpec.describe 
Sidebars::StaticMenu, feature_category: :navigation do id: 'id2', title: "Not active", icon: nil, + avatar: nil, + entity_id: nil, link: "foo3", is_active: false, pill_count: nil, diff --git a/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb b/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb index 7cf86676892..ef12ce023b4 100644 --- a/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb +++ b/spec/lib/sidebars/user_profile/menus/overview_menu_spec.rb @@ -4,8 +4,9 @@ require 'spec_helper' RSpec.describe Sidebars::UserProfile::Menus::OverviewMenu, feature_category: :navigation do it_behaves_like 'User profile menu', - title: s_('UserProfile|Overview'), - icon: 'overview', + icon: nil, + expect_avatar: true, + avatar_shape: 'circle', active_route: 'users#show' do let(:link) { "/#{user.username}" } end diff --git a/spec/lib/sidebars/user_profile/panel_spec.rb b/spec/lib/sidebars/user_profile/panel_spec.rb index a2bf490bc58..97fe13397a9 100644 --- a/spec/lib/sidebars/user_profile/panel_spec.rb +++ b/spec/lib/sidebars/user_profile/panel_spec.rb @@ -11,16 +11,13 @@ RSpec.describe Sidebars::UserProfile::Panel, feature_category: :navigation do subject { described_class.new(context) } it_behaves_like 'a panel with uniquely identifiable menu items' + it_behaves_like 'a panel instantiable by the anonymous user' it 'implements #aria_label' do expect(subject.aria_label).to eq(s_('UserProfile|User profile navigation')) end it 'implements #super_sidebar_context_header' do - expect(subject.super_sidebar_context_header).to eq({ - title: user.name, - avatar: user.avatar_url, - avatar_shape: 'circle' - }) + expect(subject.super_sidebar_context_header).to eq(_('Profile')) end end diff --git a/spec/lib/sidebars/user_settings/panel_spec.rb b/spec/lib/sidebars/user_settings/panel_spec.rb index d574652188d..e65717d75d6 100644 --- a/spec/lib/sidebars/user_settings/panel_spec.rb +++ b/spec/lib/sidebars/user_settings/panel_spec.rb @@ -10,8 +10,9 @@ RSpec.describe Sidebars::UserSettings::Panel, feature_category: :navigation do subject { described_class.new(context) } it_behaves_like 'a panel with uniquely identifiable menu items' + it_behaves_like 'a panel instantiable by the anonymous user' it 'implements #super_sidebar_context_header' do - expect(subject.super_sidebar_context_header).to eq({ title: _('User settings'), avatar: user.avatar_url }) + expect(subject.super_sidebar_context_header).to eq(_('User settings')) end end diff --git a/spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb b/spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb new file mode 100644 index 00000000000..304725ce8ca --- /dev/null +++ b/spec/lib/sidebars/your_work/menus/organizations_menu_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::YourWork::Menus::OrganizationsMenu, feature_category: :navigation do + let(:user) { build_stubbed(:user) } + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + subject { described_class.new(context) } + + describe '#render?' 
do
+    context 'when `ui_for_organizations` feature flag is enabled' do
+      context 'when `current_user` is available' do
+        before do
+          stub_feature_flags(ui_for_organizations: [user])
+        end
+
+        it 'returns true' do
+          expect(subject.render?).to eq true
+        end
+      end
+
+      context 'when `current_user` is not available' do
+        let(:user) { nil }
+
+        it 'returns false' do
+          expect(subject.render?).to eq false
+        end
+      end
+    end
+
+    context 'when `ui_for_organizations` feature flag is disabled' do
+      before do
+        stub_feature_flags(ui_for_organizations: false)
+      end
+
+      it 'returns false' do
+        expect(subject.render?).to eq false
+      end
+    end
+  end
+end
diff --git a/spec/lib/sidebars/your_work/panel_spec.rb b/spec/lib/sidebars/your_work/panel_spec.rb
index 65c2786a16d..8037f7eb7c1 100644
--- a/spec/lib/sidebars/your_work/panel_spec.rb
+++ b/spec/lib/sidebars/your_work/panel_spec.rb
@@ -10,8 +10,9 @@ RSpec.describe Sidebars::YourWork::Panel, feature_category: :navigation do
   subject { described_class.new(context) }
 
   it_behaves_like 'a panel with uniquely identifiable menu items'
+  it_behaves_like 'a panel instantiable by the anonymous user'
 
   it 'implements #super_sidebar_context_header' do
-    expect(subject.super_sidebar_context_header).to eq({ title: 'Your work', icon: 'work' })
+    expect(subject.super_sidebar_context_header).to eq(_('Your work'))
   end
 end
diff --git a/spec/lib/system_check/app/table_truncate_check_spec.rb b/spec/lib/system_check/app/table_truncate_check_spec.rb
new file mode 100644
index 00000000000..673365f3e5e
--- /dev/null
+++ b/spec/lib/system_check/app/table_truncate_check_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SystemCheck::App::TableTruncateCheck, feature_category: :cell do
+  context 'when running on single databases' do
+    before do
+      skip_if_database_exists(:ci)
+    end
+
+    describe '#skip?' do
+      subject { described_class.new.skip? }
+
+      it { is_expected.to eq(true) }
+    end
+  end
+
+  context 'when running on multiple databases' do
+    let(:needs_truncation) { true }
+
+    before do
+      skip_if_shared_database(:ci)
+
+      allow_next_instances_of(Gitlab::Database::TablesTruncate, 2) do |instance|
+        allow(instance).to receive(:needs_truncation?).and_return(needs_truncation)
+      end
+    end
+
+    describe '#skip?' do
+      subject { described_class.new.skip? }
+
+      it { is_expected.to eq(false) }
+    end
+
+    describe '#check?' do
+      subject { described_class.new.check? }
+
+      context 'when TableTruncate returns false' do
+        let(:needs_truncation) { false }
+
+        it { is_expected.to eq(true) }
+      end
+
+      context 'when TableTruncate returns true' do
+        let(:needs_truncation) { true }
+
+        it { is_expected.to eq(false) }
+      end
+    end
+
+    describe '#show_error' do
+      let(:needs_truncation) { true }
+      let(:checker) { described_class.new }
+
+      before do
+        checker.check?
+      end
+
+      subject(:show_error) { checker.show_error }
+
+      it 'outputs error information' do
+        expected = %r{
+          Try\sfixing\sit:\s+
+          sudo\s-u\s.+?\s-H\sbundle\sexec\srake\sgitlab:db:truncate_legacy_tables:main\s
+          gitlab:db:truncate_legacy_tables:ci\s+
+          For\smore\sinformation\ssee:\s+
+          doc/development/database/multiple_databases.md\sin\ssection\s'Truncating\stables'\s+
+          Please\sfix\sthe\serror\sabove\sand\srerun\sthe\schecks.\s+
+        }x
+
+        expect { show_error }.to output(expected).to_stdout
+      end
+    end
+  end
+end
diff --git a/spec/lib/unnested_in_filters/rewriter_spec.rb b/spec/lib/unnested_in_filters/rewriter_spec.rb
index e094563e8fb..ea561c42993 100644
--- a/spec/lib/unnested_in_filters/rewriter_spec.rb
+++ b/spec/lib/unnested_in_filters/rewriter_spec.rb
@@ -68,92 +68,92 @@ RSpec.describe UnnestedInFilters::Rewriter do
   describe '#rewrite' do
     let(:recorded_queries) { ActiveRecord::QueryRecorder.new { rewriter.rewrite.load } }
     let(:relation) { User.where(state: :active, user_type: %i(support_bot alert_bot)).limit(2) }
+    let(:users_select) { 'SELECT "users".*' }
+    let(:users_select_with_ignored_columns) { 'SELECT ("users"."\w+", )+("users"."\w+")' }
 
-    let(:expected_query) do
-      <<~SQL
-        SELECT
-          "users".*
-        FROM
-          unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
-          LATERAL (
-            SELECT
-              "users".*
-            FROM
-              "users"
-            WHERE
-              "users"."state" = 'active' AND
-              (users."user_type" = "user_types"."user_type")
-            LIMIT 2
-          ) AS users
-        LIMIT 2
-      SQL
+    let(:users_unnest) do
+      'FROM unnest\(\'{1\,2}\'::smallint\[\]\) AS "user_types"\("user_type"\)\, LATERAL \('
+    end
+
+    let(:users_where) do
+      'FROM
+        "users"
+        WHERE
+        "users"."state" = \'active\' AND
+        \(users."user_type" = "user_types"."user_type"\)
+        LIMIT 2\)
+        AS users
+        LIMIT 2'
+    end
+
+    let(:expected_query_regexp) do
+      Regexp.new(
+        "(#{users_select}|#{users_select_with_ignored_columns})
+        #{users_unnest}(#{users_select}|#{users_select_with_ignored_columns})
+        #{users_where}".squish
+      )
     end
 
     subject(:issued_query) { recorded_queries.occurrences.each_key.first }
 
     it 'changes the query' do
-      expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+      expect(issued_query).to match(expected_query_regexp)
     end
 
     context 'when the relation has a subquery' do
      let(:relation) { User.where(state: User.select(:state), user_type: %i(support_bot alert_bot)).limit(1) }
 
-      let(:expected_query) do
-        <<~SQL
-          SELECT
-            "users".*
-          FROM
-            unnest(ARRAY(SELECT "users"."state" FROM "users")::character varying[]) AS "states"("state"),
-            unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
-            LATERAL (
-              SELECT
-                "users".*
-              FROM
-                "users"
-              WHERE
-                (users."state" = "states"."state") AND
-                (users."user_type" = "user_types"."user_type")
-              LIMIT 1
-            ) AS users
-          LIMIT 1
-        SQL
+      let(:users_unnest) do
+        'FROM
+          unnest\(ARRAY\(SELECT "users"."state" FROM "users"\)::character varying\[\]\) AS "states"\("state"\)\,
+          unnest\(\'{1\,2}\'::smallint\[\]\) AS "user_types"\("user_type"\)\,
+          LATERAL \('
+      end
+
+      let(:users_where) do
+        'FROM
+          "users"
+          WHERE
+          \(users."state" = "states"."state"\) AND
+          \(users."user_type" = "user_types"."user_type"\)
+          LIMIT 1\)
+          AS users
+          LIMIT 1'
      end
 
      it 'changes the query' do
-        expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+        expect(issued_query).to match(expected_query_regexp)
      end
    end
 
    context 'when there is an order' do
      let(:relation) { User.where(state: %w(active blocked banned)).order(order).limit(2) }
 
-      let(:expected_query) do
-        <<~SQL
-          SELECT
-            "users".*
-          FROM
-            unnest('{active,blocked,banned}'::character varying[]) AS "states"("state"),
-            LATERAL (
-              SELECT
-                "users".*
-              FROM
-                "users"
-              WHERE
-                (users."state" = "states"."state")
-              ORDER BY
-                "users"."user_type" DESC
-              LIMIT 2
-            ) AS users
-          ORDER BY
-            "users"."user_type" DESC
-          LIMIT 2
-        SQL
+
+      let(:users_unnest) do
+        'FROM
+          unnest\(\'{active\,blocked\,banned}\'::character varying\[\]\) AS "states"\("state"\)\,
+          LATERAL \('
+      end
+
+      let(:users_where) do
+        'FROM
+          "users"
+          WHERE
+          \(users."state" = "states"."state"\)
+          ORDER BY
+          "users"."user_type" DESC
+          LIMIT 2\)
+          AS users
+          ORDER BY
+          "users"."user_type" DESC
+          LIMIT 2'
      end
 
      context 'when the order is an Arel node' do
        let(:order) { { user_type: :desc } }
 
        it 'changes the query' do
-          expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+          expect(issued_query).to match(expected_query_regexp)
        end
      end
@@ -171,7 +171,7 @@ RSpec.describe UnnestedInFilters::Rewriter do
        end
 
        it 'changes the query' do
-          expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+          expect(issued_query).to match(expected_query_regexp)
        end
      end
    end
@@ -179,85 +179,82 @@ RSpec.describe UnnestedInFilters::Rewriter do
    context 'when the combined attributes include the primary key' do
      let(:relation) { User.where(user_type: %i(support_bot alert_bot)).order(id: :desc).limit(2) }
 
-      let(:expected_query) do
-        <<~SQL
-          SELECT
-            "users".*
-          FROM
-            "users"
-          WHERE
-            "users"."id" IN (
-              SELECT
-                "users"."id"
-              FROM
-                unnest('{1,2}' :: smallint []) AS "user_types"("user_type"),
-                LATERAL (
-                  SELECT
-                    "users"."user_type",
-                    "users"."id"
-                  FROM
-                    "users"
-                  WHERE
-                    (users."user_type" = "user_types"."user_type")
-                  ORDER BY
-                    "users"."id" DESC
-                  LIMIT
-                    2
-                ) AS users
-              ORDER BY
-                "users"."id" DESC
-              LIMIT
-                2
-            )
-          ORDER BY
+      let(:users_where) do
+        'FROM
+          "users"
+          WHERE
+          "users"."id" IN
+          \(SELECT
+          "users"."id"
+          FROM
+          unnest\(\'{1\,2}\'::smallint\[\]\) AS "user_types"\("user_type"\)\,
+          LATERAL
+          \(SELECT
+          "users"."user_type"\,
+          "users"."id"
+          FROM
+          "users"
+          WHERE
+          \(users."user_type" = "user_types"."user_type"\)
+          ORDER BY
+          "users"."id" DESC
+          LIMIT 2\)
+          AS users
+          ORDER BY
          "users"."id" DESC
-          LIMIT
-            2
-        SQL
+          LIMIT 2\)
+          ORDER BY
+          "users"."id" DESC
+          LIMIT 2'
+      end
+
+      let(:expected_query_regexp) do
+        Regexp.new("(#{users_select}|#{users_select_with_ignored_columns}) #{users_where}".squish)
      end
 
      it 'changes the query' do
-        expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+        expect(issued_query).to match(expected_query_regexp)
      end
    end
 
    context 'when a join table is receiving an IN list query' do
      let(:relation) { User.joins(:status).where(status: { message: %w[foo bar] }).order(id: :desc).limit(2) }
 
-      let(:expected_query) do
-        <<~SQL
-          SELECT
-            "users".*
-          FROM
-            "users"
-          WHERE
-            "users"."id" IN (
-              SELECT
-                "users"."id"
-              FROM
-                LATERAL (
-                  SELECT
-                    message,
-                    "users"."id"
-                  FROM
-                    "users"
-                  INNER JOIN "user_statuses" "status" ON "status"."user_id" = "users"."id"
-                  WHERE
-                    "status"."message" IN ('foo', 'bar')
-                  ORDER BY
-                    "users"."id" DESC
-                  LIMIT 2) AS users
-              ORDER BY
-                "users"."id" DESC
-              LIMIT 2)
-          ORDER BY
+      let(:users_where) do
+        'FROM
+          "users"
+          WHERE
+          "users"."id" IN
+          \(SELECT
+          "users"."id"
+          FROM
+          LATERAL
+          \(SELECT
+          message,
+          "users"."id"
+          FROM
+          "users"
+          INNER JOIN "user_statuses" "status" ON "status"."user_id" = "users"."id"
+          WHERE
+          "status"."message" IN \(\'foo\'\, \'bar\'\)
+          ORDER BY
+          "users"."id" DESC
+          LIMIT 2\)
+          AS users
+          ORDER BY
          "users"."id" DESC
-          LIMIT 2
-        SQL
+          LIMIT 2\)
+          ORDER BY
+          "users"."id" DESC
+          LIMIT 2'
+      end
+
+      let(:expected_query_regexp) do
+        Regexp.new("(#{users_select}|#{users_select_with_ignored_columns}) #{users_where}".squish)
      end
 
      it 'does not rewrite the in statement for the joined table' do
-        expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+        expect(issued_query).to match(expected_query_regexp)
      end
    end
diff --git a/spec/lib/users/internal_spec.rb b/spec/lib/users/internal_spec.rb
new file mode 100644
index 00000000000..b7368f5042e
--- /dev/null
+++ b/spec/lib/users/internal_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::Internal, feature_category: :user_profile do
+  shared_examples 'bot users' do |bot_type, username, email|
+    it 'creates the user if it does not exist' do
+      expect do
+        described_class.public_send(bot_type)
+      end.to change { User.where(user_type: bot_type).count }.by(1)
+    end
+
+    it 'creates a route for the namespace of the created user' do
+      bot_user = described_class.public_send(bot_type)
+
+      expect(bot_user.namespace.route).to be_present
+    end
+
+    it 'does not create a new user if it already exists' do
+      described_class.public_send(bot_type)
+
+      expect do
+        described_class.public_send(bot_type)
+      end.not_to change { User.count }
+    end
+
+    context 'when a regular user exists with the bot username' do
+      it 'creates a user with a non-conflicting username' do
+        create(:user, username: username)
+
+        expect do
+          described_class.public_send(bot_type)
+        end.to change { User.where(user_type: bot_type).count }.by(1)
+      end
+    end
+
+    context 'when a regular user exists with the bot user email' do
+      it 'creates a user with a non-conflicting email' do
+        create(:user, email: email)
+
+        expect do
+          described_class.public_send(bot_type)
+        end.to change { User.where(user_type: bot_type).count }.by(1)
+      end
+    end
+
+    context 'when a domain allowlist is in place' do
+      before do
+        stub_application_setting(domain_allowlist: ['gitlab.com'])
+      end
+
+      it 'creates the bot user' do
+        expect do
+          described_class.public_send(bot_type)
+        end.to change { User.where(user_type: bot_type).count }.by(1)
+      end
+    end
+  end
+
+  shared_examples 'bot user avatars' do |bot_type, avatar_filename|
+    it 'sets the custom avatar for the created bot' do
+      bot_user = described_class.public_send(bot_type)
+
+      expect(bot_user.avatar.url).to be_present
+      expect(bot_user.avatar.filename).to eq(avatar_filename)
+    end
+  end
+
+  it_behaves_like 'bot users', :alert_bot, 'alert-bot', 'alert@example.com'
+  it_behaves_like 'bot users', :support_bot, 'support-bot', 'support@example.com'
+  it_behaves_like 'bot users', :migration_bot, 'migration-bot', 'noreply+gitlab-migration-bot@example.com'
+  it_behaves_like 'bot users', :security_bot, 'GitLab-Security-Bot', 'security-bot@example.com'
+  it_behaves_like 'bot users', :ghost, 'ghost', 'ghost@example.com'
+  it_behaves_like 'bot users', :automation_bot, 'automation-bot', 'automation@example.com'
+  it_behaves_like 'bot users', :llm_bot, 'GitLab-Llm-Bot', 'llm-bot@example.com'
+  it_behaves_like 'bot users', :admin_bot, 'GitLab-Admin-Bot', 'admin-bot@example.com'
+
+  it_behaves_like 'bot user avatars', :alert_bot, 'alert-bot.png'
+  it_behaves_like 'bot user avatars', :support_bot, 'support-bot.png'
+  it_behaves_like 'bot user avatars', :security_bot, 'security-bot.png'
+  it_behaves_like 'bot user avatars', :automation_bot, 'support-bot.png'
+  it_behaves_like 'bot user avatars', :llm_bot, 'support-bot.png'
+  it_behaves_like 'bot user avatars', :admin_bot, 'admin-bot.png'
+
+  context 'when bot is the support_bot' do
+    subject { described_class.support_bot }
+
+    it { is_expected.to be_confirmed }
+  end
+
+  context 'when bot is the admin bot' do
+    subject { described_class.admin_bot }
+
+    it { is_expected.to be_admin }
+    it { is_expected.to be_confirmed }
+  end
+end
-- 
cgit v1.2.3