gitlab.com/gitlab-org/gitlab-foss.git
path: root/spec/lib
Diffstat (limited to 'spec/lib')
-rw-r--r--  spec/lib/after_commit_queue_spec.rb | 17
-rw-r--r--  spec/lib/api/ci/helpers/runner_helpers_spec.rb | 5
-rw-r--r--  spec/lib/api/entities/changelog_spec.rb | 17
-rw-r--r--  spec/lib/api/entities/ci/pipeline_spec.rb | 16
-rw-r--r--  spec/lib/api/entities/personal_access_token_spec.rb | 26
-rw-r--r--  spec/lib/api/entities/plan_limit_spec.rb | 1
-rw-r--r--  spec/lib/api/entities/project_import_failed_relation_spec.rb | 3
-rw-r--r--  spec/lib/api/entities/project_import_status_spec.rb | 60
-rw-r--r--  spec/lib/banzai/filter/external_link_filter_spec.rb | 11
-rw-r--r--  spec/lib/banzai/filter/footnote_filter_spec.rb | 45
-rw-r--r--  spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb (renamed from spec/lib/banzai/filter/issuable_state_filter_spec.rb) | 66
-rw-r--r--  spec/lib/banzai/filter/markdown_filter_spec.rb | 8
-rw-r--r--  spec/lib/banzai/filter/plantuml_filter_spec.rb | 6
-rw-r--r--  spec/lib/banzai/filter/references/issue_reference_filter_spec.rb | 16
-rw-r--r--  spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb | 16
-rw-r--r--  spec/lib/banzai/filter/references/reference_cache_spec.rb | 12
-rw-r--r--  spec/lib/banzai/filter/sanitization_filter_spec.rb | 5
-rw-r--r--  spec/lib/banzai/filter/syntax_highlight_filter_spec.rb | 42
-rw-r--r--  spec/lib/banzai/pipeline/full_pipeline_spec.rb | 17
-rw-r--r--  spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb | 2
-rw-r--r--  spec/lib/banzai/reference_parser/base_parser_spec.rb | 8
-rw-r--r--  spec/lib/banzai/render_context_spec.rb | 10
-rw-r--r--  spec/lib/bulk_imports/clients/http_spec.rb | 4
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb | 96
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb | 10
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb | 10
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb | 127
-rw-r--r--  spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb | 36
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb | 116
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb | 77
-rw-r--r--  spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb | 22
-rw-r--r--  spec/lib/bulk_imports/groups/stage_spec.rb | 8
-rw-r--r--  spec/lib/bulk_imports/ndjson_pipeline_spec.rb | 26
-rw-r--r--  spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb | 58
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb | 52
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb | 176
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb | 40
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb | 64
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb | 159
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb | 45
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb | 11
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb | 27
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb | 119
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb | 168
-rw-r--r--  spec/lib/bulk_imports/projects/stage_spec.rb | 12
-rw-r--r--  spec/lib/error_tracking/collector/payload_validator_spec.rb | 16
-rw-r--r--  spec/lib/feature/definition_spec.rb | 72
-rw-r--r--  spec/lib/feature_spec.rb | 109
-rw-r--r--  spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb | 12
-rw-r--r--  spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/anonymous_session_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/application_context_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/application_rate_limiter_spec.rb | 67
-rw-r--r--  spec/lib/gitlab/asciidoc_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/auth/user_access_denied_reason_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/auth_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb | 56
-rw-r--r--  spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb | 104
-rw-r--r--  spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb | 98
-rw-r--r--  spec/lib/gitlab/background_migration/job_coordinator_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb | 113
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb | 93
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb | 94
-rw-r--r--  spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb | 63
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb | 65
-rw-r--r--  spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb | 141
-rw-r--r--  spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb | 224
-rw-r--r--  spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb | 175
-rw-r--r--  spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/background_migration/reset_merge_status_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/background_migration_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/bitbucket_server_import/importer_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/ci/build/context/build_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/build/context/global_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/build/policy/variables_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/build/rules/rule_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/build/rules_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/config/entry/bridge_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/ci/config/entry/processable_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/ci/config/entry/tags_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/config/external/context_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/ci/config/external/processor_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/ci/config/external/rules_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb | 97
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/create_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb | 94
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb | 85
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/logger_spec.rb | 132
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 83
-rw-r--r--  spec/lib/gitlab/ci/status/bridge/common_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/ci/tags/bulk_insert_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/ci/variables/builder_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/config/entry/undefined_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/content_security_policy/config_loader_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/contributions_calendar_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/daemon_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/database/async_indexes/index_creator_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_job_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/load_balancing/configuration_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb | 115
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sticking_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/database/load_balancing_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/database/loose_foreign_keys_spec.rb | 45
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb | 231
-rw-r--r--  spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb | 164
-rw-r--r--  spec/lib/gitlab/database/migrations/instrumentation_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/query_details_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/query_log_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/migrations/runner_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/database/partitioning/partition_manager_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/database/partitioning/single_numeric_list_partition_spec.rb | 50
-rw-r--r--  spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb | 214
-rw-r--r--  spec/lib/gitlab/database/query_analyzer_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/database/reindexing/coordinator_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/database/reindexing_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/database/shared_model_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/database/type/json_pg_safe_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/diff/custom_diff_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/diff/file_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/diff/highlight_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/email/handler/create_issue_handler_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/email/handler/service_desk_handler_spec.rb | 127
-rw-r--r--  spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/email/service_desk_receiver_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/empty_search_results_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/error_tracking_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/etag_caching/store_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/exception_log_formatter_spec.rb | 57
-rw-r--r--  spec/lib/gitlab/experimentation/controller_concern_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/experimentation/experiment_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git/diff_stats_collection_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/tree_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/client_spec.rb | 71
-rw-r--r--  spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/github_import/importer/note_importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/parallel_importer_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/gon_helper_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/hook_data/merge_request_builder_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/import/import_failure_service_spec.rb | 83
-rw-r--r--  spec/lib/gitlab/import/set_async_jid_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 5
-rw-r--r--  spec/lib/gitlab/import_export/attributes_permitter_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/import_export/project/tree_restorer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 2
-rw-r--r--  spec/lib/gitlab/lets_encrypt/client_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/lograge/custom_options_spec.rb | 50
-rw-r--r--  spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb | 290
-rw-r--r--  spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb | 219
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb | 161
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/active_record_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/multi_collection_paginator_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/pagination/keyset/order_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/pagination/offset_pagination_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/patch/legacy_database_config_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/process_management_spec.rb | 144
-rw-r--r--  spec/lib/gitlab/quick_actions/dsl_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/rack_attack_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/rate_limit_helpers_spec.rb | 50
-rw-r--r--  spec/lib/gitlab/redis/multi_store_spec.rb | 268
-rw-r--r--  spec/lib/gitlab/redis/sessions_spec.rb | 87
-rw-r--r--  spec/lib/gitlab/regex_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/relative_positioning/range_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/repository_archive_rate_limiter_spec.rb | 56
-rw-r--r--  spec/lib/gitlab/saas_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/search/abuse_detection_spec.rb | 114
-rw-r--r--  spec/lib/gitlab/search/abuse_validators/no_abusive_coercion_from_string_validator_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/search/abuse_validators/no_abusive_term_length_validator_spec.rb | 71
-rw-r--r--  spec/lib/gitlab/search/params_spec.rb | 136
-rw-r--r--  spec/lib/gitlab/security/scan_configuration_spec.rb | 64
-rw-r--r--  spec/lib/gitlab/sidekiq_enq_spec.rb | 93
-rw-r--r--  spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/sidekiq_status_spec.rb | 69
-rw-r--r--  spec/lib/gitlab/spamcheck/client_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/subscription_portal_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/tracking/destinations/snowplow_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/tracking_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 368
-rw-r--r--  spec/lib/gitlab/utils/usage_data_spec.rb | 151
-rw-r--r--  spec/lib/gitlab/utils_spec.rb | 12
-rw-r--r--  spec/lib/google_api/cloud_platform/client_spec.rb | 43
-rw-r--r--  spec/lib/sidebars/concerns/link_with_html_options_spec.rb | 39
-rw-r--r--  spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb | 59
-rw-r--r--  spec/lib/sidebars/menu_spec.rb | 19
-rw-r--r--  spec/lib/sidebars/projects/menus/shimo_menu_spec.rb | 44
-rw-r--r--  spec/lib/version_check_spec.rb | 60
249 files changed, 6631 insertions, 3961 deletions
diff --git a/spec/lib/after_commit_queue_spec.rb b/spec/lib/after_commit_queue_spec.rb
deleted file mode 100644
index ca383808bfc..00000000000
--- a/spec/lib/after_commit_queue_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe AfterCommitQueue do
- it 'runs after transaction is committed' do
- called = false
- test_proc = proc { called = true }
-
- project = build(:project)
- project.run_after_commit(&test_proc)
-
- project.save
-
- expect(called).to be true
- end
-end
diff --git a/spec/lib/api/ci/helpers/runner_helpers_spec.rb b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
index c6638bea59e..c4d740f0adc 100644
--- a/spec/lib/api/ci/helpers/runner_helpers_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe API::Ci::Helpers::Runner do
let(:revision) { '10.0' }
let(:platform) { 'test' }
let(:architecture) { 'arm' }
+ let(:executor) { 'shell' }
let(:config) { { 'gpus' => 'all' } }
let(:runner_params) do
{
@@ -48,6 +49,7 @@ RSpec.describe API::Ci::Helpers::Runner do
'revision' => revision,
'platform' => platform,
'architecture' => architecture,
+ 'executor' => executor,
'config' => config,
'ignored' => 1
}
@@ -57,12 +59,13 @@ RSpec.describe API::Ci::Helpers::Runner do
subject(:details) { runner_helper.get_runner_details_from_request }
it 'extracts the runner details', :aggregate_failures do
- expect(details.keys).to match_array(%w(name version revision platform architecture config ip_address))
+ expect(details.keys).to match_array(%w(name version revision platform architecture executor config ip_address))
expect(details['name']).to eq(name)
expect(details['version']).to eq(version)
expect(details['revision']).to eq(revision)
expect(details['platform']).to eq(platform)
expect(details['architecture']).to eq(architecture)
+ expect(details['executor']).to eq(executor)
expect(details['config']).to eq(config)
expect(details['ip_address']).to eq(ip_address)
end
diff --git a/spec/lib/api/entities/changelog_spec.rb b/spec/lib/api/entities/changelog_spec.rb
new file mode 100644
index 00000000000..2cf585d4e0e
--- /dev/null
+++ b/spec/lib/api/entities/changelog_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Changelog do
+ let(:changelog) { "This is a changelog" }
+
+ subject { described_class.new(changelog).as_json }
+
+ it 'exposes correct attributes' do
+ expect(subject).to include(:notes)
+ end
+
+ it 'exposes correct notes' do
+ expect(subject[:notes]).to eq(changelog)
+ end
+end
diff --git a/spec/lib/api/entities/ci/pipeline_spec.rb b/spec/lib/api/entities/ci/pipeline_spec.rb
new file mode 100644
index 00000000000..6a658cc3e18
--- /dev/null
+++ b/spec/lib/api/entities/ci/pipeline_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Ci::Pipeline do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+ let_it_be(:job) { create(:ci_build, name: "rspec", coverage: 30.212, pipeline: pipeline) }
+
+ let(:entity) { described_class.new(pipeline) }
+
+ subject { entity.as_json }
+
+ it 'returns the coverage as a string' do
+ expect(subject[:coverage]).to eq '30.21'
+ end
+end
diff --git a/spec/lib/api/entities/personal_access_token_spec.rb b/spec/lib/api/entities/personal_access_token_spec.rb
new file mode 100644
index 00000000000..fd3c53a21b4
--- /dev/null
+++ b/spec/lib/api/entities/personal_access_token_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::PersonalAccessToken do
+ describe '#as_json' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user, expires_at: nil) }
+
+ let(:entity) { described_class.new(token) }
+
+ it 'returns token data' do
+ expect(entity.as_json).to eq({
+ id: token.id,
+ name: token.name,
+ revoked: false,
+ created_at: token.created_at,
+ scopes: ['api'],
+ user_id: user.id,
+ last_used_at: nil,
+ active: true,
+ expires_at: nil
+ })
+ end
+ end
+end
diff --git a/spec/lib/api/entities/plan_limit_spec.rb b/spec/lib/api/entities/plan_limit_spec.rb
index 75e39e4f074..1b8b21d47f3 100644
--- a/spec/lib/api/entities/plan_limit_spec.rb
+++ b/spec/lib/api/entities/plan_limit_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe API::Entities::PlanLimit do
expect(subject).to include(
:conan_max_file_size,
:generic_packages_max_file_size,
+ :helm_max_file_size,
:maven_max_file_size,
:npm_max_file_size,
:nuget_max_file_size,
diff --git a/spec/lib/api/entities/project_import_failed_relation_spec.rb b/spec/lib/api/entities/project_import_failed_relation_spec.rb
index d3c24f6fce3..d6143915ecb 100644
--- a/spec/lib/api/entities/project_import_failed_relation_spec.rb
+++ b/spec/lib/api/entities/project_import_failed_relation_spec.rb
@@ -16,7 +16,8 @@ RSpec.describe API::Entities::ProjectImportFailedRelation do
exception_class: import_failure.exception_class,
exception_message: nil,
relation_name: import_failure.relation_key,
- source: import_failure.source
+ source: import_failure.source,
+ line_number: import_failure.relation_index
)
end
end
diff --git a/spec/lib/api/entities/project_import_status_spec.rb b/spec/lib/api/entities/project_import_status_spec.rb
index 5eda613a6a6..37a18718950 100644
--- a/spec/lib/api/entities/project_import_status_spec.rb
+++ b/spec/lib/api/entities/project_import_status_spec.rb
@@ -2,29 +2,32 @@
require 'spec_helper'
-RSpec.describe API::Entities::ProjectImportStatus do
+RSpec.describe API::Entities::ProjectImportStatus, :aggregate_failures do
describe '#as_json' do
subject { entity.as_json }
let(:correlation_id) { 'cid' }
context 'when no import state exists' do
- let(:entity) { described_class.new(build(:project)) }
+ let(:entity) { described_class.new(build(:project, import_type: 'import_type')) }
it 'includes basic fields and no failures' do
expect(subject[:import_status]).to eq('none')
+ expect(subject[:import_type]).to eq('import_type')
expect(subject[:correlation_id]).to be_nil
expect(subject[:import_error]).to be_nil
expect(subject[:failed_relations]).to eq([])
+ expect(subject[:stats]).to be_nil
end
end
context 'when import has not finished yet' do
- let(:project) { create(:project, :import_scheduled, import_correlation_id: correlation_id) }
- let(:entity) { described_class.new(project) }
+ let(:project) { create(:project, :import_scheduled, import_type: 'import_type', import_correlation_id: correlation_id) }
+ let(:entity) { described_class.new(project, import_type: 'import_type') }
- it 'includes basic fields and no failures', :aggregate_failures do
+ it 'includes basic fields and no failures' do
expect(subject[:import_status]).to eq('scheduled')
+ expect(subject[:import_type]).to eq('import_type')
expect(subject[:correlation_id]).to eq(correlation_id)
expect(subject[:import_error]).to be_nil
expect(subject[:failed_relations]).to eq([])
@@ -32,29 +35,64 @@ RSpec.describe API::Entities::ProjectImportStatus do
end
context 'when import has finished with failed relations' do
- let(:project) { create(:project, :import_finished, import_correlation_id: correlation_id) }
+ let(:project) { create(:project, :import_finished, import_type: 'import_type', import_correlation_id: correlation_id) }
let(:entity) { described_class.new(project) }
- it 'includes basic fields with failed relations', :aggregate_failures do
- create(:import_failure, :hard_failure, project: project, correlation_id_value: correlation_id)
+ it 'includes basic fields with failed relations' do
+ create(
+ :import_failure,
+ :hard_failure,
+ project: project,
+ correlation_id_value: correlation_id,
+ relation_key: 'issues',
+ relation_index: 1
+ )
+
+ # Doesn't show soft failures
+ create(:import_failure, :soft_failure)
expect(subject[:import_status]).to eq('finished')
+ expect(subject[:import_type]).to eq('import_type')
expect(subject[:correlation_id]).to eq(correlation_id)
expect(subject[:import_error]).to be_nil
- expect(subject[:failed_relations]).not_to be_empty
+ expect(subject[:failed_relations].length).to eq(1)
+
+ failure = subject[:failed_relations].last
+ expect(failure[:exception_class]).to eq('RuntimeError')
+ expect(failure[:source]).to eq('method_call')
+ expect(failure[:relation_name]).to eq('issues')
+ expect(failure[:line_number]).to eq(1)
end
end
context 'when import has failed' do
- let(:project) { create(:project, :import_failed, import_correlation_id: correlation_id, import_last_error: 'error') }
+ let(:project) { create(:project, :import_failed, import_type: 'import_type', import_correlation_id: correlation_id, import_last_error: 'error') }
let(:entity) { described_class.new(project) }
- it 'includes basic fields with import error', :aggregate_failures do
+ it 'includes basic fields with import error' do
expect(subject[:import_status]).to eq('failed')
+ expect(subject[:import_type]).to eq('import_type')
expect(subject[:correlation_id]).to eq(correlation_id)
expect(subject[:import_error]).to eq('error')
expect(subject[:failed_relations]).to eq([])
end
end
+
+ context 'when importing from github', :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :import_failed, import_type: 'github') }
+ let(:entity) { described_class.new(project) }
+
+ before do
+ ::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :fetched, value: 10)
+ ::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :imported, value: 8)
+ end
+
+ it 'exposes the import stats' do
+ expect(subject[:stats]).to eq(
+ 'fetched' => { 'issues' => 10 },
+ 'imported' => { 'issues' => 8 }
+ )
+ end
+ end
end
end
diff --git a/spec/lib/banzai/filter/external_link_filter_spec.rb b/spec/lib/banzai/filter/external_link_filter_spec.rb
index 630730dfc1a..24d13bdb42c 100644
--- a/spec/lib/banzai/filter/external_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/external_link_filter_spec.rb
@@ -191,4 +191,15 @@ RSpec.describe Banzai::Filter::ExternalLinkFilter do
end
end
end
+
+ context 'for links that have `rel=license`' do
+ let(:doc) { filter %q(<a rel="license" href="http://example.com">rel-license</a>) }
+
+ it_behaves_like 'an external link with rel attribute'
+
+ it 'maintains rel license' do
+ expect(doc.at_css('a')).to have_attribute('rel')
+ expect(doc.at_css('a')['rel']).to include 'license'
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/footnote_filter_spec.rb b/spec/lib/banzai/filter/footnote_filter_spec.rb
index 54faa748d53..d41f5e8633d 100644
--- a/spec/lib/banzai/filter/footnote_filter_spec.rb
+++ b/spec/lib/banzai/filter/footnote_filter_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::FootnoteFilter do
include FilterSpecHelper
+ using RSpec::Parameterized::TableSyntax
# rubocop:disable Style/AsciiComments
# first[^1] and second[^second] and third[^_😄_]
@@ -13,16 +14,16 @@ RSpec.describe Banzai::Filter::FootnoteFilter do
# rubocop:enable Style/AsciiComments
let(:footnote) do
<<~EOF.strip_heredoc
- <p>first<sup><a href="#fn-1" id="fnref-1">1</a></sup> and second<sup><a href="#fn-second" id="fnref-second">2</a></sup> and third<sup><a href="#fn-_%F0%9F%98%84_" id="fnref-_%F0%9F%98%84_">3</a></sup></p>
-
+ <p>first<sup><a href="#fn-1" id="fnref-1" data-footnote-ref>1</a></sup> and second<sup><a href="#fn-second" id="fnref-second" data-footnote-ref>2</a></sup> and third<sup><a href="#fn-_%F0%9F%98%84_" id="fnref-_%F0%9F%98%84_" data-footnote-ref>3</a></sup></p>
+ <section data-footnotes>
<ol>
<li id="fn-1">
- <p>one <a href="#fnref-1" aria-label="Back to content">↩</a></p>
+ <p>one <a href="#fnref-1" aria-label="Back to content" data-footnote-backref>↩</a></p>
</li>
<li id="fn-second">
- <p>two <a href="#fnref-second" aria-label="Back to content">↩</a></p>
+ <p>two <a href="#fnref-second" aria-label="Back to content" data-footnote-backref>↩</a></p>
</li>\n<li id="fn-_%F0%9F%98%84_">
- <p>three <a href="#fnref-_%F0%9F%98%84_" aria-label="Back to content">↩</a></p>
+ <p>three <a href="#fnref-_%F0%9F%98%84_" aria-label="Back to content" data-footnote-backref>↩</a></p>
</li>
</ol>
EOF
@@ -30,19 +31,20 @@ RSpec.describe Banzai::Filter::FootnoteFilter do
let(:filtered_footnote) do
<<~EOF.strip_heredoc
- <p>first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref="">1</a></sup> and second<sup class="footnote-ref"><a href="#fn-second-#{identifier}" id="fnref-second-#{identifier}" data-footnote-ref="">2</a></sup> and third<sup class="footnote-ref"><a href="#fn-_%F0%9F%98%84_-#{identifier}" id="fnref-_%F0%9F%98%84_-#{identifier}" data-footnote-ref="">3</a></sup></p>
-
- <section class=\"footnotes\" data-footnotes><ol>
+ <p>first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref>1</a></sup> and second<sup class="footnote-ref"><a href="#fn-second-#{identifier}" id="fnref-second-#{identifier}" data-footnote-ref>2</a></sup> and third<sup class="footnote-ref"><a href="#fn-_%F0%9F%98%84_-#{identifier}" id="fnref-_%F0%9F%98%84_-#{identifier}" data-footnote-ref>3</a></sup></p>
+ <section data-footnotes class=\"footnotes\">
+ <ol>
<li id="fn-1-#{identifier}">
- <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p>
+ <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p>
</li>
<li id="fn-second-#{identifier}">
- <p>two <a href="#fnref-second-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p>
+ <p>two <a href="#fnref-second-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p>
</li>
<li id="fn-_%F0%9F%98%84_-#{identifier}">
- <p>three <a href="#fnref-_%F0%9F%98%84_-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p>
+ <p>three <a href="#fnref-_%F0%9F%98%84_-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p>
</li>
- </ol></section>
+ </ol>
+ </section>
EOF
end
@@ -52,7 +54,7 @@ RSpec.describe Banzai::Filter::FootnoteFilter do
let(:identifier) { link_node[:id].delete_prefix('fnref-1-') }
it 'properly adds the necessary ids and classes' do
- expect(doc.to_html).to eq filtered_footnote
+ expect(doc.to_html).to eq filtered_footnote.strip
end
context 'using ruby-based HTML renderer' do
@@ -101,4 +103,21 @@ RSpec.describe Banzai::Filter::FootnoteFilter do
end
end
end
+
+ context 'when detecting footnotes' do
+ where(:valid, :markdown) do
+ true | "1. one[^1]\n[^1]: AbC"
+ true | "1. one[^abc]\n[^abc]: AbC"
+ false | '1. [one](#fnref-abc)'
+ false | "1. one[^1]\n[^abc]: AbC"
+ end
+
+ with_them do
+ it 'detects valid footnotes' do
+ result = Banzai::Pipeline::FullPipeline.call(markdown, project: nil)
+
+ expect(result[:output].at_css('section.footnotes').present?).to eq(valid)
+ end
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/issuable_state_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
index a3851fd7cca..0840ccf19e4 100644
--- a/spec/lib/banzai/filter/issuable_state_filter_spec.rb
+++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
@@ -2,28 +2,27 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::IssuableStateFilter do
- include ActionView::Helpers::UrlHelper
+RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter do
include FilterSpecHelper
- let(:user) { create(:user) }
- let(:context) { { current_user: user, issuable_state_filter_enabled: true } }
- let(:closed_issue) { create_issue(:closed) }
- let(:project) { create(:project, :public) }
- let(:group) { create(:group) }
- let(:other_project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:other_project) { create(:project, :public) }
+ let_it_be(:closed_issue) { create_issue(:closed) }
+
+ let(:context) { { current_user: user, issuable_reference_expansion_enabled: true } }
def create_link(text, data)
- link_to(text, '', class: 'gfm has-tooltip', data: data)
+ ActionController::Base.helpers.link_to(text, '', class: 'gfm has-tooltip', data: data)
end
- def create_issue(state)
- create(:issue, state, project: project)
+ def create_issue(state, attributes = {})
+ create(:issue, state, attributes.merge(project: project))
end
- def create_merge_request(state)
- create(:merge_request, state,
- source_project: project, target_project: project)
+ def create_merge_request(state, attributes = {})
+ create(:merge_request, state, attributes.merge(source_project: project, target_project: project))
end
it 'ignores non-GFM links' do
@@ -139,6 +138,30 @@ RSpec.describe Banzai::Filter::IssuableStateFilter do
expect(doc.css('a').last.text).to eq("#{moved_issue.to_reference} (moved)")
end
+
+ it 'shows title for references with +' do
+ issue = create_issue(:opened, title: 'Some issue')
+ link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue', reference_format: '+')
+ doc = filter(link, context)
+
+ expect(doc.css('a').last.text).to eq("#{issue.title} (#{issue.to_reference})")
+ end
+
+ it 'truncates long title for references with +' do
+ issue = create_issue(:opened, title: 'Some issue ' * 10)
+ link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue', reference_format: '+')
+ doc = filter(link, context)
+
+ expect(doc.css('a').last.text).to eq("#{issue.title.truncate(50)} (#{issue.to_reference})")
+ end
+
+ it 'shows both title and state for closed references with +' do
+ issue = create_issue(:closed, title: 'Some issue')
+ link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue', reference_format: '+')
+ doc = filter(link, context)
+
+ expect(doc.css('a').last.text).to eq("#{issue.title} (#{issue.to_reference} - closed)")
+ end
end
context 'for merge request references' do
@@ -197,5 +220,20 @@ RSpec.describe Banzai::Filter::IssuableStateFilter do
expect(doc.css('a').last.text).to eq("#{merge_request.to_reference} (merged)")
end
+
+ it 'shows title for references with +' do
+ merge_request = create_merge_request(:opened, title: 'Some merge request')
+
+ link = create_link(
+ merge_request.to_reference,
+ merge_request: merge_request.id,
+ reference_type: 'merge_request',
+ reference_format: '+'
+ )
+
+ doc = filter(link, context)
+
+ expect(doc.css('a').last.text).to eq("#{merge_request.title} (#{merge_request.to_reference})")
+ end
end
end
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
index a310de5c015..1c9b894e885 100644
--- a/spec/lib/banzai/filter/markdown_filter_spec.rb
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do
it 'adds language to lang attribute when specified' do
result = filter("```html\nsome code\n```", no_sourcepos: true)
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
expect(result).to start_with('<pre lang="html"><code>')
else
expect(result).to start_with('<pre><code lang="html">')
@@ -49,7 +49,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do
it 'works with utf8 chars in language' do
result = filter("```日\nsome code\n```", no_sourcepos: true)
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
expect(result).to start_with('<pre lang="日"><code>')
else
expect(result).to start_with('<pre><code lang="日">')
@@ -59,7 +59,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do
it 'works with additional language parameters' do
result = filter("```ruby:red gem foo\nsome code\n```", no_sourcepos: true)
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
expect(result).to start_with('<pre lang="ruby:red" data-meta="gem foo"><code>')
else
expect(result).to start_with('<pre><code lang="ruby:red gem foo">')
@@ -102,7 +102,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do
expect(result).to include('<td>foot <sup')
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
expect(result).to include('<section class="footnotes" data-footnotes>')
else
expect(result).to include('<section class="footnotes">')
diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb
index d1a3b5689a8..e1e02c09fbe 100644
--- a/spec/lib/banzai/filter/plantuml_filter_spec.rb
+++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do
it 'replaces plantuml pre tag with img tag' do
stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080")
- input = if Feature.enabled?(:use_cmark_renderer)
+ input = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
'<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
else
'<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>'
@@ -24,7 +24,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do
it 'does not replace plantuml pre tag with img tag if disabled' do
stub_application_setting(plantuml_enabled: false)
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
output = '<pre lang="plantuml"><code>Bob -&gt; Sara : Hello</code></pre>'
else
@@ -40,7 +40,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do
it 'does not replace plantuml pre tag with img tag if url is invalid' do
stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid")
- input = if Feature.enabled?(:use_cmark_renderer)
+ input = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
'<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
else
'<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>'
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index 88c2494b243..14c1542b724 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -116,6 +116,22 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
expect(doc.children.first.attr('data-original')).to eq inner_html
end
+ it 'includes a data-reference-format attribute' do
+ doc = reference_filter("Issue #{reference}+")
+ link = doc.css('a').first
+
+ expect(link).to have_attribute('data-reference-format')
+ expect(link.attr('data-reference-format')).to eq('+')
+ end
+
+ it 'includes a data-reference-format attribute for URL references' do
+ doc = reference_filter("Issue #{issue_url}+")
+ link = doc.css('a').first
+
+ expect(link).to have_attribute('data-reference-format')
+ expect(link.attr('data-reference-format')).to eq('+')
+ end
+
it 'supports an :only_path context' do
doc = reference_filter("Issue #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index ee2ce967a47..3c488820853 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -109,6 +109,22 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(link.attr('data-merge-request')).to eq merge.id.to_s
end
+ it 'includes a data-reference-format attribute' do
+ doc = reference_filter("Merge #{reference}+")
+ link = doc.css('a').first
+
+ expect(link).to have_attribute('data-reference-format')
+ expect(link.attr('data-reference-format')).to eq('+')
+ end
+
+ it 'includes a data-reference-format attribute for URL references' do
+ doc = reference_filter("Merge #{urls.project_merge_request_url(project, merge)}+")
+ link = doc.css('a').first
+
+ expect(link).to have_attribute('data-reference-format')
+ expect(link.attr('data-reference-format')).to eq('+')
+ end
+
it 'supports an :only_path context' do
doc = reference_filter("Merge #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb
index dcd153da16a..dc43c33a08d 100644
--- a/spec/lib/banzai/filter/references/reference_cache_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb
@@ -35,18 +35,6 @@ RSpec.describe Banzai::Filter::References::ReferenceCache do
subject
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(reference_cache_memoization: false)
- end
-
- it 'ignores memoized rendered HTML' do
- expect(doc).to receive(:to_html).and_call_original
-
- subject
- end
- end
end
context 'when result is not available' do
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index 8eb8e5cf800..24e787bddd5 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -115,6 +115,11 @@ RSpec.describe Banzai::Filter::SanitizationFilter do
expect(filter(act).to_html).to eq exp
end
+ it 'allows `rel=license` in links' do
+ exp = act = '<a rel="license" href="http://example.com">rel-license</a>'
+ expect(filter(act).to_html).to eq exp
+ end
+
it 'allows `data-math-style` attribute on `code` and `pre` elements' do
html = <<-HTML
<pre class="code" data-math-style="inline">something</pre>
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index dfe022b51d2..ef46fd62486 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre><code>def fun end</code></pre>')
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", ""
@@ -40,13 +40,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
context "when a valid language is specified" do
it "highlights as that language" do
- result = if Feature.enabled?(:use_cmark_renderer)
+ result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
filter('<pre lang="ruby"><code>def fun end</code></pre>')
else
filter('<pre><code lang="ruby">def fun end</code></pre>')
end
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", "ruby"
@@ -54,13 +54,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
context "when an invalid language is specified" do
it "highlights as plaintext" do
- result = if Feature.enabled?(:use_cmark_renderer)
+ result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
filter('<pre lang="gnuplot"><code>This is a test</code></pre>')
else
filter('<pre><code lang="gnuplot">This is a test</code></pre>')
end
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", "gnuplot"
@@ -73,13 +73,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
%w(math mermaid plantuml suggestion).each do |lang|
context "when #{lang} is specified" do
it "highlights as plaintext but with the correct language attribute and class" do
- result = if Feature.enabled?(:use_cmark_renderer)
+ result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
filter(%{<pre lang="#{lang}"><code>This is a test</code></pre>})
else
filter(%{<pre><code lang="#{lang}">This is a test</code></pre>})
end
- expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
end
include_examples "XSS prevention", lang
@@ -89,7 +89,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
let(:lang_params) { 'foo-bar-kux' }
let(:xss_lang) do
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
"#{lang} data-meta=\"foo-bar-kux\"&lt;script&gt;alert(1)&lt;/script&gt;"
else
"#{lang}#{described_class::LANG_PARAMS_DELIMITER}&lt;script&gt;alert(1)&lt;/script&gt;"
@@ -97,18 +97,18 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
end
it "includes data-lang-params tag with extra information" do
- result = if Feature.enabled?(:use_cmark_renderer)
+ result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
filter(%{<pre lang="#{lang}" data-meta="#{lang_params}"><code>This is a test</code></pre>})
else
filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}">This is a test</code></pre>})
end
- expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
end
include_examples "XSS prevention", lang
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
include_examples "XSS prevention",
"#{lang} data-meta=\"foo-bar-kux\"&lt;script&gt;alert(1)&lt;/script&gt;"
else
@@ -126,19 +126,19 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
let(:lang_params) { '-1+10' }
let(:expected_result) do
- %{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}
+ %{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>}
end
context 'when delimiter is space' do
it 'delimits on the first appearance' do
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
result = filter(%{<pre lang="#{lang}" data-meta="#{lang_params} more-things"><code>This is a test</code></pre>})
- expect(result.to_html).to eq(expected_result)
+ expect(result.to_html.delete("\n")).to eq(expected_result)
else
result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}#{delimiter}more-things">This is a test</code></pre>})
- expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
end
end
end
@@ -147,10 +147,10 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it 'delimits on the first appearance' do
result = filter(%{<pre lang="#{lang}#{delimiter}#{lang_params} more-things"><code>This is a test</code></pre>})
- if Feature.enabled?(:use_cmark_renderer)
- expect(result.to_html).to eq(expected_result)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
+ expect(result.to_html.delete("\n")).to eq(expected_result)
else
- expect(result.to_html).to eq(%{<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">This is a test</span></code></pre>})
+ expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">This is a test</span></code></pre><copy-code></copy-code></div>})
end
end
end
@@ -161,7 +161,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "includes it in the highlighted code block" do
result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>')
- expect(result.to_html).to eq('<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
end
end
@@ -173,13 +173,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
end
it "highlights as plaintext" do
- result = if Feature.enabled?(:use_cmark_renderer)
+ result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
filter('<pre lang="ruby"><code>This is a test</code></pre>')
else
filter('<pre><code lang="ruby">This is a test</code></pre>')
end
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", "ruby"
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 01bca7b23e8..620b7d97a5b 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -43,26 +43,27 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
let(:filtered_footnote) do
<<~EOF.strip_heredoc
- <p dir="auto">first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref="">1</a></sup> and second<sup class="footnote-ref"><a href="#fn-%F0%9F%98%84second-#{identifier}" id="fnref-%F0%9F%98%84second-#{identifier}" data-footnote-ref="">2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn-_twenty-#{identifier}" id="fnref-_twenty-#{identifier}" data-footnote-ref="">3</a></sup></p>
-
- <section class="footnotes" data-footnotes><ol>
+ <p dir="auto">first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref>1</a></sup> and second<sup class="footnote-ref"><a href="#fn-%F0%9F%98%84second-#{identifier}" id="fnref-%F0%9F%98%84second-#{identifier}" data-footnote-ref>2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn-_twenty-#{identifier}" id="fnref-_twenty-#{identifier}" data-footnote-ref>3</a></sup></p>
+ <section data-footnotes class="footnotes">
+ <ol>
<li id="fn-1-#{identifier}">
- <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <p>one <a href="#fnref-1-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
<li id="fn-%F0%9F%98%84second-#{identifier}">
- <p>two <a href="#fnref-%F0%9F%98%84second-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <p>two <a href="#fnref-%F0%9F%98%84second-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
<li id="fn-_twenty-#{identifier}">
- <p>twenty <a href="#fnref-_twenty-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <p>twenty <a href="#fnref-_twenty-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
- </ol></section>
+ </ol>
+ </section>
EOF
end
it 'properly adds the necessary ids and classes' do
stub_commonmark_sourcepos_disabled
- expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote
+ expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote.strip
end
context 'using ruby-based HTML renderer' do
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
index 394fcc06eba..c8cd9d4fcac 100644
--- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
let(:markdown) { %Q(``` foo\\@bar\nfoo\n```) }
it 'renders correct html' do
- if Feature.enabled?(:use_cmark_renderer)
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
correct_html_included(markdown, %Q(<pre data-sourcepos="1:1-3:3" lang="foo@bar"><code>foo\n</code></pre>))
else
correct_html_included(markdown, %Q(<code lang="foo@bar">foo\n</code>))
diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb
index 4701caa0667..d31ccccd6c3 100644
--- a/spec/lib/banzai/reference_parser/base_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb
@@ -29,10 +29,10 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
describe '#project_for_node' do
it 'returns the Project for a node' do
- document = instance_double('document', fragment?: false)
- project = instance_double('project')
- object = instance_double('object', project: project)
- node = instance_double('node', document: document)
+ document = double('document', fragment?: false)
+ project = instance_double('Project')
+ object = double('object', project: project)
+ node = double('node', document: document)
context.associate_document(document, object)
diff --git a/spec/lib/banzai/render_context_spec.rb b/spec/lib/banzai/render_context_spec.rb
index c4b609b936e..4b5c2c5a7df 100644
--- a/spec/lib/banzai/render_context_spec.rb
+++ b/spec/lib/banzai/render_context_spec.rb
@@ -7,15 +7,15 @@ RSpec.describe Banzai::RenderContext do
describe '#project_for_node' do
it 'returns the default project if no associated project was found' do
- project = instance_double('project')
+ project = instance_double('Project')
context = described_class.new(project)
expect(context.project_for_node(document)).to eq(project)
end
it 'returns the associated project if one was associated explicitly' do
- project = instance_double('project')
- obj = instance_double('object', project: project)
+ project = instance_double('Project')
+ obj = double('object', project: project)
context = described_class.new
context.associate_document(document, obj)
@@ -24,8 +24,8 @@ RSpec.describe Banzai::RenderContext do
end
it 'returns the project associated with a DocumentFragment when using a node' do
- project = instance_double('project')
- obj = instance_double('object', project: project)
+ project = instance_double('Project')
+ obj = double('object', project: project)
context = described_class.new
node = document.children.first
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 623f9aa453a..1bbc96af8ee 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -38,11 +38,11 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'when response is not success' do
it 'raises BulkImports::Error' do
- response_double = double(code: 503, success?: false)
+ response_double = double(code: 503, success?: false, request: double(path: double(path: '/test')))
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test')
end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb
new file mode 100644
index 00000000000..6c5465c8a66
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::BadgesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
+
+ let(:entity) { create(:bulk_import_entity, group: group) }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ let(:first_page) { extracted_data(has_next_page: true) }
+ let(:last_page) { extracted_data(name: 'badge2') }
+
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(first_page, last_page)
+ end
+ end
+
+ it 'imports a group badge' do
+ expect { pipeline.run }.to change(Badge, :count).by(2)
+
+ badge = group.badges.last
+
+ expect(badge.name).to eq('badge2')
+ expect(badge.link_url).to eq(badge_data['link_url'])
+ expect(badge.image_url).to eq(badge_data['image_url'])
+ end
+
+ context 'when project entity' do
+ let(:first_page) { extracted_data(has_next_page: true) }
+ let(:last_page) { extracted_data(name: 'badge2', kind: 'project') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+
+ it 'imports a project badge & skips group badge' do
+ expect { pipeline.run }.to change(Badge, :count).by(1)
+
+ badge = project.badges.last
+
+ expect(badge.name).to eq('badge2')
+ expect(badge.link_url).to eq(badge_data['link_url'])
+ expect(badge.image_url).to eq(badge_data['image_url'])
+ expect(badge.type).to eq('ProjectBadge')
+ end
+ end
+
+ describe '#transform' do
+ it 'returns transformed badge hash' do
+ badge = subject.transform(context, badge_data)
+
+ expect(badge[:name]).to eq('badge')
+ expect(badge[:link_url]).to eq(badge_data['link_url'])
+ expect(badge[:image_url]).to eq(badge_data['image_url'])
+ expect(badge.keys).to contain_exactly(:name, :link_url, :image_url)
+ end
+
+ context 'when data is blank' do
+ it 'does nothing when the data is blank' do
+ expect(subject.transform(context, nil)).to be_nil
+ end
+ end
+
+ context 'when project entity & group badge' do
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+
+ it 'returns' do
+ expect(subject.transform(context, { 'name' => 'test', 'kind' => 'group' })).to be_nil
+ end
+ end
+ end
+
+ def badge_data(name = 'badge', kind = 'group')
+ {
+ 'name' => name,
+ 'link_url' => 'https://gitlab.example.com',
+ 'image_url' => 'https://gitlab.example.com/image.png',
+ 'kind' => kind
+ }
+ end
+
+ def extracted_data(name: 'badge', kind: 'group', has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? '2' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name, kind)], page_info: page_info)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
index 9e3a6d5b8df..48db24def48 100644
--- a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
@@ -59,16 +59,6 @@ RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do
end
end
- context 'when label is persisted' do
- it 'does not save label' do
- label = create(:group_label, group: group)
-
- expect(label).not_to receive(:save!)
-
- subject.load(context, label)
- end
- end
-
context 'when label is missing' do
it 'returns' do
expect(subject.load(context, nil)).to be_nil
diff --git a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
index 9f71175f46f..902b29bc365 100644
--- a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
@@ -81,16 +81,6 @@ RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do
end
end
- context 'when milestone is persisted' do
- it 'does not save milestone' do
- milestone = create(:milestone, group: group)
-
- expect(milestone).not_to receive(:save!)
-
- subject.load(context, milestone)
- end
- end
-
context 'when milestone is missing' do
it 'returns' do
expect(subject.load(context, nil)).to be_nil
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index a3cc866a406..0f6238e10dc 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:project) { create(:project) }
- let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
- let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let_it_be(:group) { create(:group) }
+
+ let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
+ let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject(:pipeline) { described_class.new(context) }
@@ -24,57 +25,101 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
- describe '#run' do
- it 'imports uploads into destination portable and removes tmpdir' do
- allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
- allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ shared_examples 'uploads import' do
+ describe '#run' do
+ before do
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ end
- pipeline.run
+ it 'imports uploads into destination portable and removes tmpdir' do
+ pipeline.run
- expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
+ expect(portable.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
- expect(Dir.exist?(tmpdir)).to eq(false)
- end
- end
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
- describe '#extract' do
- it 'downloads & extracts upload paths' do
- allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
- expect(pipeline).to receive(:untar_zxf)
- file_download_service = instance_double("BulkImports::FileDownloadService")
+ context 'when importing avatar' do
+ let(:uploads_dir_path) { File.join(tmpdir, 'avatar') }
- expect(BulkImports::FileDownloadService)
- .to receive(:new)
- .with(
- configuration: context.configuration,
- relative_url: "/projects/test/export_relations/download?relation=uploads",
- dir: tmpdir,
- filename: 'uploads.tar.gz')
- .and_return(file_download_service)
+ it 'imports avatar' do
+ FileUtils.touch(File.join(uploads_dir_path, 'avatar.png'))
- expect(file_download_service).to receive(:execute)
+ expect_next_instance_of(entity.update_service) do |service|
+ expect(service).to receive(:execute)
+ end
- extracted_data = pipeline.extract(context)
+ pipeline.run
+ end
- expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
- end
- end
+ context 'when something goes wrong' do
+ it 'records an avatar loading failure' do
+ allow_next_instance_of(entity.update_service) do |service|
+ allow(service).to receive(:execute).and_return(nil)
+ end
+
+ pipeline.run
- describe '#load' do
- it 'creates a file upload' do
- expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1)
+ expect(entity.failures.first.exception_class).to include('AvatarLoadingError')
+ end
+ end
+ end
end
- context 'when dynamic path is nil' do
- it 'returns' do
- expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count }
+ describe '#extract' do
+ it 'downloads & extracts upload paths' do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ expect(pipeline).to receive(:untar_zxf)
+ file_download_service = instance_double("BulkImports::FileDownloadService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads",
+ dir: tmpdir,
+ filename: 'uploads.tar.gz')
+ .and_return(file_download_service)
+
+ expect(file_download_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
end
end
- context 'when path is a directory' do
- it 'returns' do
- expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count }
+ describe '#load' do
+ it 'creates a file upload' do
+ expect { pipeline.load(context, upload_file_path) }.to change { portable.uploads.count }.by(1)
+ end
+
+ context 'when dynamic path is nil' do
+ it 'returns' do
+ expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { portable.uploads.count }
+ end
+ end
+
+ context 'when path is a directory' do
+ it 'returns' do
+ expect { pipeline.load(context, uploads_dir_path) }.not_to change { portable.uploads.count }
+ end
end
end
end
+
+ context 'when importing to group' do
+ let(:portable) { group }
+ let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
+
+ context 'when importing to project' do
+ let(:portable) { project }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
end
diff --git a/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb
new file mode 100644
index 00000000000..0a04c0a2243
--- /dev/null
+++ b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Rest::GetBadgesQuery do
+ describe '.to_h' do
+ shared_examples 'resource and page info query' do
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:encoded_full_path) { ERB::Util.url_encode(entity.source_full_path) }
+
+ it 'returns correct query and page info' do
+ expected = {
+ resource: [entity.pluralized_name, encoded_full_path, 'badges'].join('/'),
+ query: {
+ page: context.tracker.next_page
+ }
+ }
+
+ expect(described_class.to_h(context)).to eq(expected)
+ end
+ end
+
+ context 'when entity is group' do
+ let(:entity) { create(:bulk_import_entity) }
+
+ include_examples 'resource and page info query'
+ end
+
+ context 'when entity is project' do
+ let(:entity) { create(:bulk_import_entity, :project_entity) }
+
+ include_examples 'resource and page info query'
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
deleted file mode 100644
index 9fa35c4707d..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
+++ /dev/null
@@ -1,116 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::BadgesPipeline do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
-
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
- destination_namespace: group.full_path,
- group: group
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'imports a group badge' do
- first_page = extracted_data(has_next_page: true)
- last_page = extracted_data(name: 'badge2')
-
- allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor|
- allow(extractor)
- .to receive(:extract)
- .and_return(first_page, last_page)
- end
-
- expect { subject.run }.to change(Badge, :count).by(2)
-
- badge = group.badges.last
-
- expect(badge.name).to eq('badge2')
- expect(badge.link_url).to eq(badge_data['link_url'])
- expect(badge.image_url).to eq(badge_data['image_url'])
- end
-
- describe '#load' do
- it 'creates a badge' do
- expect { subject.load(context, badge_data) }.to change(Badge, :count).by(1)
-
- badge = group.badges.first
-
- badge_data.each do |key, value|
- expect(badge[key]).to eq(value)
- end
- end
-
- it 'does nothing when the data is blank' do
- expect { subject.load(context, nil) }.not_to change(Badge, :count)
- end
- end
-
- describe '#transform' do
- it 'return transformed badge hash' do
- badge = subject.transform(context, badge_data)
-
- expect(badge[:name]).to eq('badge')
- expect(badge[:link_url]).to eq(badge_data['link_url'])
- expect(badge[:image_url]).to eq(badge_data['image_url'])
- expect(badge.keys).to contain_exactly(:name, :link_url, :image_url)
- end
-
- context 'when data is blank' do
- it 'does nothing when the data is blank' do
- expect(subject.transform(context, nil)).to be_nil
- end
- end
- end
-
- describe 'pipeline parts' do
- it { expect(described_class).to include_module(BulkImports::Pipeline) }
- it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
-
- it 'has extractors' do
- expect(described_class.get_extractor)
- .to eq(
- klass: BulkImports::Common::Extractors::RestExtractor,
- options: {
- query: BulkImports::Groups::Rest::GetBadgesQuery
- }
- )
- end
-
- it 'has transformers' do
- expect(described_class.transformers)
- .to contain_exactly(
- { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
- )
- end
- end
-
- def badge_data(name = 'badge')
- {
- 'name' => name,
- 'link_url' => 'https://gitlab.example.com',
- 'image_url' => 'https://gitlab.example.com/image.png'
- }
- end
-
- def extracted_data(name: 'badge', has_next_page: false)
- page_info = {
- 'has_next_page' => has_next_page,
- 'next_page' => has_next_page ? '2' : nil
- }
-
- BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name)], page_info: page_info)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
deleted file mode 100644
index c68284aa580..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- group: group,
- bulk_import: bulk_import,
- source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
- destination_namespace: group.full_path
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'updates the group avatar' do
- avatar_path = 'spec/fixtures/dk.png'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect { subject.run }.to change(context.group, :avatar)
-
- expect(context.group.avatar.filename).to eq(File.basename(avatar_path))
- end
-
- it 'raises an error when the avatar upload fails' do
- avatar_path = 'spec/fixtures/aosp_manifest.xml'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error)
- .with(
- bulk_import_id: context.bulk_import.id,
- bulk_import_entity_id: context.entity.id,
- bulk_import_entity_type: context.entity.source_type,
- context_extra: context.extra,
- exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError",
- exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon",
- pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline",
- pipeline_step: :loader
- )
- end
-
- expect { subject.run }.to change(BulkImports::Failure, :count)
- end
- end
-
- def stub_file_download(filepath = 'file/path.png', **params)
- expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader|
- expect(downloader).to receive(:execute).and_return(filepath)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb
deleted file mode 100644
index eef6848e118..00000000000
--- a/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Rest::GetBadgesQuery do
- describe '.to_h' do
- it 'returns query resource and page info' do
- entity = create(:bulk_import_entity)
- tracker = create(:bulk_import_tracker, entity: entity)
- context = BulkImports::Pipeline::Context.new(tracker)
- encoded_full_path = ERB::Util.url_encode(entity.source_full_path)
- expected = {
- resource: ['groups', encoded_full_path, 'badges'].join('/'),
- query: {
- page: context.tracker.next_page
- }
- }
-
- expect(described_class.to_h(context)).to eq(expected)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 5719acac4d7..55a8e40f480 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -8,13 +8,13 @@ RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::MilestonesPipeline],
- [1, BulkImports::Groups::Pipelines::BadgesPipeline],
- [2, BulkImports::Common::Pipelines::BoardsPipeline]
+ [1, BulkImports::Common::Pipelines::BadgesPipeline],
+ [2, BulkImports::Common::Pipelines::BoardsPipeline],
+ [2, BulkImports::Common::Pipelines::UploadsPipeline]
]
end
@@ -24,7 +24,7 @@ RSpec.describe BulkImports::Groups::Stage do
describe '.pipelines' do
it 'lists all the pipelines with their stage number, ordered by stage' do
- expect(described_class.new(bulk_import).pipelines & pipelines).to eq(pipelines)
+ expect(described_class.new(bulk_import).pipelines & pipelines).to contain_exactly(*pipelines)
expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
index c5197fb29d9..8ea6ceb7619 100644
--- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -130,6 +130,22 @@ RSpec.describe BulkImports::NdjsonPipeline do
subject.transform(context, data)
end
+
+ context 'when data is nil' do
+ before do
+ expect(Gitlab::ImportExport::Group::RelationFactory).not_to receive(:create)
+ end
+
+ it 'returns' do
+ expect(subject.transform(nil, nil)).to be_nil
+ end
+
+ context 'when relation hash is nil' do
+ it 'returns' do
+ expect(subject.transform(nil, [nil, 0])).to be_nil
+ end
+ end
+ end
end
describe '#load' do
@@ -143,16 +159,6 @@ RSpec.describe BulkImports::NdjsonPipeline do
end
end
- context 'when object is persisted' do
- it 'does not save the object' do
- object = double(persisted?: true)
-
- expect(object).not_to receive(:save!)
-
- subject.load(nil, object)
- end
- end
-
context 'when object is missing' do
it 'returns' do
expect(subject.load(nil, nil)).to be_nil
diff --git a/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
new file mode 100644
index 00000000000..b680fa5cbfc
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Graphql::GetSnippetRepositoryQuery do
+ describe 'query repository based on full_path' do
+ let_it_be(:entity) { create(:bulk_import_entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ it 'has a valid query' do
+ query = GraphQL::Query.new(
+ GitlabSchema,
+ described_class.to_s,
+ variables: described_class.variables(context)
+ )
+ result = GitlabSchema.static_validator.validate(query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ it 'returns snippet httpUrlToRepo' do
+ expect(described_class.to_s).to include('httpUrlToRepo')
+ end
+
+ it 'returns snippet createdAt' do
+ expect(described_class.to_s).to include('createdAt')
+ end
+
+ it 'returns snippet title' do
+ expect(described_class.to_s).to include('title')
+ end
+
+ describe '.variables' do
+ it 'queries project based on source_full_path and pagination' do
+ expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 }
+
+ expect(described_class.variables(context)).to eq(expected)
+ end
+ end
+
+ describe '.data_path' do
+ it 'returns data path' do
+ expected = %w[data project snippets nodes]
+
+ expect(described_class.data_path).to eq(expected)
+ end
+ end
+
+ describe '.page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data project snippets page_info]
+
+ expect(described_class.page_info_path).to eq(expected)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb
new file mode 100644
index 00000000000..e2744a6a457
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:auto_devops) do
+ {
+ 'created_at' => '2016-06-13T15:02:47.967Z',
+ 'updated_at' => '2016-06-14T15:02:47.967Z',
+ 'enabled' => true,
+ 'deploy_strategy' => 'continuous'
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports auto devops options into destination project' do
+ group.add_owner(user)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [auto_devops]))
+ end
+
+ pipeline.run
+
+ expect(project.auto_devops.enabled).to be_truthy
+ expect(project.auto_devops.deploy_strategy).to eq('continuous')
+ expect(project.auto_devops.created_at).to eq('2016-06-13T15:02:47.967Z')
+ expect(project.auto_devops.updated_at).to eq('2016-06-14T15:02:47.967Z')
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
new file mode 100644
index 00000000000..98a2e8b6a57
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let(:ci_pipeline_attributes) { {} }
+ let(:ci_pipeline) do
+ {
+ sha: "fakesha",
+ ref: "fakeref",
+ project: project,
+ source: "web"
+ }.merge(ci_pipeline_attributes)
+ end
+
+ let(:ci_pipeline2) do
+ {
+ sha: "fakesha2",
+ ref: "fakeref2",
+ project: project,
+ source: "web"
+ }.merge(ci_pipeline_attributes)
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ group.add_owner(user)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(
+ BulkImports::Pipeline::ExtractedData.new(data: [ci_pipeline, ci_pipeline2])
+ )
+ end
+
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_source_branch!)
+ end
+
+ pipeline.run
+ end
+
+ it 'imports Ci::Pipeline into destination project' do
+ expect(project.all_pipelines.count).to eq(2)
+ expect(project.ci_pipelines.first.sha).to eq('fakesha')
+ expect(project.ci_pipelines.second.sha).to eq('fakesha2')
+ end
+
+ context 'notes' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'notes' => [
+ {
+ 'note' => 'test note',
+ 'author_id' => 22,
+ 'noteable_type' => 'Commit',
+ 'sha' => '',
+ 'author' => {
+ 'name' => 'User 22'
+ },
+ 'commit_id' => 'fakesha',
+ 'updated_at' => '2016-06-14T15:02:47.770Z',
+ 'events' => [
+ {
+ 'action' => 'created',
+ 'author_id' => 22
+ }
+ ]
+ }
+ ]
+ }
+ end
+
+ it 'imports pipeline with notes' do
+ note = project.all_pipelines.first.notes.first
+ expect(note.note).to include('test note')
+ expect(note.events.first.action).to eq('created')
+ end
+ end
+
+ context 'stages' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'stages' => [
+ {
+ 'name' => 'test stage',
+ 'statuses' => [
+ {
+ 'name' => 'first status',
+ 'status' => 'created'
+ }
+ ]
+ }
+ ]
+ }
+ end
+
+ it 'imports pipeline with stages' do
+ stage = project.all_pipelines.first.stages.first
+ expect(stage.name).to eq('test stage')
+ expect(stage.statuses.first.name).to eq('first status')
+ end
+ end
+
+ context 'external pull request' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'source' => 'external_pull_request_event',
+ 'external_pull_request' => {
+ 'source_branch' => 'test source branch',
+ 'target_branch' => 'master',
+ 'source_sha' => 'testsha',
+ 'target_sha' => 'targetsha',
+ 'source_repository' => 'test repository',
+ 'target_repository' => 'test repository',
+ 'status' => 1,
+ 'pull_request_iid' => 1
+ }
+ }
+ end
+
+ it 'imports pipeline with external pull request' do
+ pull_request = project.all_pipelines.first.external_pull_request
+ expect(pull_request.source_branch).to eq('test source branch')
+ expect(pull_request.status).to eq('open')
+ end
+ end
+
+ context 'merge request' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'source' => 'merge_request_event',
+ 'merge_request' => {
+ 'description' => 'test merge request',
+ 'title' => 'test MR',
+ 'source_branch' => 'test source branch',
+ 'target_branch' => 'master',
+ 'source_sha' => 'testsha',
+ 'target_sha' => 'targetsha',
+ 'source_repository' => 'test repository',
+ 'target_repository' => 'test repository',
+ 'target_project_id' => project.id,
+ 'source_project_id' => project.id,
+ 'author_id' => user.id
+ }
+ }
+ end
+
+ it 'imports pipeline with merge request' do
+ merge_request = project.all_pipelines.first.merge_request
+ expect(merge_request.source_branch).to eq('test source branch')
+ expect(merge_request.description).to eq('test merge request')
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
new file mode 100644
index 00000000000..9dac8e45ef9
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let_it_be(:policy) do
+ {
+ 'created_at' => '2019-12-13 13:45:04 UTC',
+ 'updated_at' => '2019-12-14 13:45:04 UTC',
+ 'next_run_at' => '2019-12-15 13:45:04 UTC',
+ 'name_regex' => 'test',
+ 'name_regex_keep' => 'regex_keep',
+ 'cadence' => '3month',
+ 'older_than' => '1month',
+ 'keep_n' => 100,
+ 'enabled' => true
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports container expiration policy', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[policy, 0]]))
+ end
+
+ pipeline.run
+
+ policy.each_pair do |key, value|
+ expect(entity.project.container_expiration_policy.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
new file mode 100644
index 00000000000..12713f008bb
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:schedule_attributes) { {} }
+ let(:schedule) do
+ {
+ 'description' => 'test pipeline schedule',
+ 'cron' => '1 1 1 1 1',
+ 'cron_timezone' => 'UTC',
+ 'ref' => 'testref',
+ 'created_at' => '2016-06-13T15:02:47.967Z',
+ 'updated_at' => '2016-06-14T15:02:47.967Z'
+ }.merge(schedule_attributes)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ group.add_owner(user)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [schedule]))
+ end
+
+ pipeline.run
+ end
+
+ it 'imports schedule into destination project' do
+ expect(project.pipeline_schedules.count).to eq(1)
+ pipeline_schedule = project.pipeline_schedules.first
+ schedule.each do |k, v|
+ expect(pipeline_schedule.send(k)).to eq(v)
+ end
+ end
+
+ context 'when schedule is active' do
+ let(:schedule_attributes) { { 'active' => true } }
+
+ it 'imports the schedule but sets active to false' do
+ expect(project.pipeline_schedules.first.active).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
new file mode 100644
index 00000000000..11c475318bb
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:extra) { {} }
+ let(:project_attributes) do
+ {
+ 'description' => 'description',
+ 'visibility_level' => 0,
+ 'archived' => false,
+ 'merge_requests_template' => 'test',
+ 'merge_requests_rebase_enabled' => true,
+ 'approvals_before_merge' => 0,
+ 'reset_approvals_on_push' => true,
+ 'merge_requests_ff_only_enabled' => true,
+ 'issues_template' => 'test',
+ 'shared_runners_enabled' => true,
+ 'build_coverage_regex' => 'build_coverage_regex',
+ 'build_allow_git_fetch' => true,
+ 'build_timeout' => 3600,
+ 'pending_delete' => false,
+ 'public_builds' => true,
+ 'last_repository_check_failed' => nil,
+ 'only_allow_merge_if_pipeline_succeeds' => true,
+ 'has_external_issue_tracker' => false,
+ 'request_access_enabled' => true,
+ 'has_external_wiki' => false,
+ 'ci_config_path' => nil,
+ 'only_allow_merge_if_all_discussions_are_resolved' => true,
+ 'printing_merge_request_link_enabled' => true,
+ 'auto_cancel_pending_pipelines' => 'enabled',
+ 'service_desk_enabled' => false,
+ 'delete_error' => nil,
+ 'disable_overriding_approvers_per_merge_request' => true,
+ 'resolve_outdated_diff_discussions' => true,
+ 'jobs_cache_index' => nil,
+ 'external_authorization_classification_label' => nil,
+ 'pages_https_only' => false,
+ 'merge_requests_author_approval' => false,
+ 'merge_requests_disable_committers_approval' => true,
+ 'require_password_to_approve' => true,
+ 'remove_source_branch_after_merge' => true,
+ 'autoclose_referenced_issues' => true,
+ 'suggestion_commit_message' => 'Test!'
+ }.merge(extra)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ before do
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
+
+ pipeline.run
+ end
+
+ it 'imports project attributes', :aggregate_failures do
+ project_attributes.each_pair do |key, value|
+ expect(project.public_send(key)).to eq(value)
+ end
+ end
+
+ context 'when project is archived' do
+ let(:extra) { { 'archived' => true } }
+
+ it 'sets project as archived' do
+ expect(project.archived).to eq(true)
+ end
+ end
+ end
+
+ describe '#extract' do
+ before do
+ file_download_service = instance_double("BulkImports::FileDownloadService")
+ file_decompression_service = instance_double("BulkImports::FileDecompressionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
+ dir: tmpdir,
+ filename: 'self.json.gz')
+ .and_return(file_download_service)
+
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(dir: tmpdir, filename: 'self.json.gz')
+ .and_return(file_decompression_service)
+
+ expect(file_download_service).to receive(:execute)
+ expect(file_decompression_service).to receive(:execute)
+ end
+
+ it 'downloads, decompresses & decodes json' do
+ allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
+ end
+
+ context 'when json parsing error occurs' do
+ it 'raises an error' do
+ allow(pipeline).to receive(:json_attributes).and_return("invalid")
+
+ expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
+ end
+ end
+ end
+
+ describe '#transform' do
+ it 'removes prohibited attributes from hash' do
+ input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
+
+ expect(Gitlab::ImportExport::AttributeCleaner).to receive(:clean).and_call_original
+
+ expect(pipeline.transform(context, input)).to eq({ 'description' => 'description' })
+ end
+ end
+
+ describe '#load' do
+ it 'assigns attributes, drops visibility and reconciles shared runner setting' do
+ expect(project).to receive(:assign_attributes).with(project_attributes)
+ expect(project).to receive(:reconcile_shared_runners_setting!)
+ expect(project).to receive(:drop_visibility_level!)
+ expect(project).to receive(:save!)
+
+ pipeline.load(context, project_attributes)
+ end
+ end
+
+ describe '#json_attributes' do
+ it 'reads raw json from file' do
+ filepath = File.join(tmpdir, 'self.json')
+
+ FileUtils.touch(filepath)
+ expect_file_read(filepath)
+
+ pipeline.json_attributes
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb
new file mode 100644
index 00000000000..1f0defdd20c
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProjectFeaturePipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let_it_be(:project_feature) do
+ {
+ "builds_access_level": 10,
+ "wiki_access_level": 10,
+ "issues_access_level": 10,
+ "merge_requests_access_level": 10,
+ "snippets_access_level": 10,
+ "repository_access_level": 10,
+ "pages_access_level": 10,
+ "forking_access_level": 10,
+ "metrics_dashboard_access_level": 10,
+ "operations_access_level": 10,
+ "analytics_access_level": 10,
+ "security_and_compliance_access_level": 10,
+ "container_registry_access_level": 10,
+ "updated_at": "2016-09-23T11:58:28.000Z",
+ "created_at": "2014-12-26T09:26:45.000Z"
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports project feature', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[project_feature, 0]]))
+ end
+
+ pipeline.run
+
+ project_feature.each_pair do |key, value|
+ expect(entity.project.project_feature.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
index 583485faf8d..38b22538e70 100644
--- a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
@@ -47,6 +47,17 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do
end
end
+ context 'project has no repository' do
+ let(:project_data) { { 'httpUrlToRepo' => '' } }
+
+ it 'skips repository import' do
+ expect(context.portable).not_to receive(:ensure_repository)
+ expect(context.portable.repository).not_to receive(:fetch_as_mirror)
+
+ pipeline.run
+ end
+ end
+
context 'blocked local networks' do
let(:project_data) { { 'httpUrlToRepo' => 'http://localhost/foo.git' } }
diff --git a/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb
new file mode 100644
index 00000000000..2dfa036fc48
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let_it_be(:setting) { { 'issue_template_key' => 'test', 'project_key' => 'key' } }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports service desk setting', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[setting, 0]]))
+ end
+
+ pipeline.run
+
+ setting.each_pair do |key, value|
+ expect(entity.project.service_desk_setting.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
new file mode 100644
index 00000000000..dae879de998
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:snippet_attributes) { {} }
+ let(:exported_snippet) do
+ {
+ 'id' => 25,
+ 'title' => 'Snippet with 2 files',
+ 'content' => 'content',
+ 'author_id' => 22,
+ 'project_id' => 6,
+ 'created_at' => '2021-10-28T20:21:59.712Z',
+ 'updated_at' => '2021-10-28T20:31:10.408Z',
+ 'file_name' => 'galactic_empire.rb',
+ 'visibility_level' => 0,
+ 'description' => 'How to track your Galactic armies.'
+ }.merge(snippet_attributes)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ group.add_owner(user)
+ snippet_with_index = [exported_snippet.dup, 0]
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [snippet_with_index]))
+ end
+
+ pipeline.run
+ end
+
+ it 'imports snippet into destination project' do
+ imported_snippet = project.snippets.last
+
+ expect(imported_snippet).to have_attributes(
+ title: exported_snippet['title'],
+ content: exported_snippet['content'],
+ author_id: user.id,
+ created_at: DateTime.parse(exported_snippet['created_at']),
+ updated_at: DateTime.parse(exported_snippet['updated_at']),
+ file_name: exported_snippet['file_name'],
+ visibility_level: exported_snippet['visibility_level'])
+ end
+
+ context 'with award_emoji' do
+ let(:snippet_attributes) { { 'award_emoji' => [expected_award] } }
+ let(:expected_award) do
+ {
+ 'id' => 580,
+ 'name' => 'rocket',
+ 'user_id' => 1,
+ 'awardable_type' => 'Snippet',
+ 'created_at' => '2021-10-28T20:30:25.802Z',
+ 'updated_at' => '2021-10-28T20:30:25.802Z'
+ }
+ end
+
+ it 'restores the award_emoji' do
+ snippet_award = project.snippets.first.award_emoji.first
+
+ expect(snippet_award).to have_attributes(
+ name: expected_award['name'],
+ user_id: user.id,
+ awardable_type: expected_award['awardable_type'],
+ created_at: DateTime.parse(expected_award['created_at']),
+ updated_at: DateTime.parse(expected_award['updated_at']))
+ end
+ end
+
+ context 'with notes', :freeze_time do
+ # To properly emulate a fixture that is expected to be read from a file, we dump a JSON
+ # object, then parse it right away. We expect some attrs, such as Datetimes, to be
+ # converted to Strings.
+ let(:exported_snippet) { Gitlab::Json.parse(note.noteable.attributes.merge('notes' => notes).to_json) }
+ let(:note) { create(:note_on_project_snippet, :with_attachment) }
+ let(:notes) { [note.attributes.merge('author' => { 'name' => note.author.name })] }
+
+ it 'restores the notes' do
+ snippet_note = project.snippets.last.notes.first
+ author_name = note.author.name
+ note_updated_at = exported_snippet['notes'].first['updated_at'].split('.').first
+
+ expect(snippet_note).to have_attributes(
+ note: note.note + "\n\n *By #{author_name} on #{note_updated_at} (imported from GitLab)*",
+ noteable_type: note.noteable_type,
+ author_id: user.id,
+ updated_at: note.updated_at,
+ line_code: note.line_code,
+ commit_id: note.commit_id,
+ system: note.system,
+ st_diff: note.st_diff,
+ updated_by_id: user.id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
new file mode 100644
index 00000000000..9897e74ec7b
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:bulk_import) { create(:bulk_import, user: user) }
+ let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let!(:matched_snippet) { create(:snippet, project: project, created_at: "1981-12-13T23:59:59Z") }
+ let(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import_configuration.bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: project.full_path
+ )
+ end
+
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ let(:http_url_to_repo) { 'https://example.com/foo/bar/snippets/42.git' }
+ let(:data) do
+ [
+ {
+ 'title' => matched_snippet.title,
+ 'httpUrlToRepo' => http_url_to_repo,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ let(:page_info) do
+ {
+ 'next_page' => 'eyJpZCI6IjIyMDA2OTYifQ',
+ 'has_next_page' => false
+ }
+ end
+
+ let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) }
+
+ describe 'extractor' do
+ it 'is a GraphqlExtractor with Graphql::GetSnippetRepositoryQuery' do
+ expect(described_class.get_extractor).to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: {
+ query: BulkImports::Projects::Graphql::GetSnippetRepositoryQuery
+ })
+ end
+ end
+
+ describe '#run' do
+ let(:validation_response) { double(Hash, 'error?': false) }
+
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(extracted_data)
+ end
+
+ allow_next_instance_of(Snippets::RepositoryValidationService) do |repository_validation|
+ allow(repository_validation).to receive(:execute).and_return(validation_response)
+ end
+ end
+
+ shared_examples 'skippable snippet' do
+ it 'does not create snippet repo' do
+ pipeline.run
+
+ expect(Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists?).to be false
+ end
+ end
+
+ context 'when a snippet is not matched' do
+ let(:data) do
+ [
+ {
+ 'title' => 'unmatched title',
+ 'httpUrlToRepo' => http_url_to_repo,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when httpUrlToRepo is empty' do
+ let(:data) do
+ [
+ {
+ 'title' => matched_snippet.title,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when a snippet matches' do
+ context 'when snippet url is valid' do
+ it 'creates snippet repo' do
+ expect { pipeline.run }
+ .to change { Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists? }.to true
+ end
+
+ it 'updates snippets statistics' do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_as_mirror)
+ end
+
+ service = double(Snippets::UpdateStatisticsService)
+
+ expect(Snippets::UpdateStatisticsService).to receive(:new).with(kind_of(Snippet)).and_return(service)
+ expect(service).to receive(:execute)
+
+ pipeline.run
+ end
+
+ it 'fetches snippet repo from url' do
+ expect_next_instance_of(Repository) do |repository|
+ expect(repository)
+ .to receive(:fetch_as_mirror)
+ .with("https://oauth2:#{bulk_import_configuration.access_token}@example.com/foo/bar/snippets/42.git")
+ end
+
+ pipeline.run
+ end
+ end
+
+ context 'when url is invalid' do
+ let(:http_url_to_repo) { 'http://0.0.0.0' }
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when snippet is invalid' do
+ let(:validation_response) { double(Hash, 'error?': true) }
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_as_mirror)
+ end
+ end
+
+ it 'does not leave a hanging SnippetRepository behind' do
+ pipeline.run
+
+ expect(SnippetRepository.where(snippet_id: matched_snippet.id).exists?).to be false
+ end
+
+ it 'does not call UpdateStatisticsService' do
+ expect(Snippets::UpdateStatisticsService).not_to receive(:new)
+
+ pipeline.run
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index e7670085f60..81cbdcae9d1 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,20 +2,32 @@
require 'spec_helper'
+# Any new stages must be added to
+# `ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb` as well.
RSpec.describe BulkImports::Projects::Stage do
let(:pipelines) do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Projects::Pipelines::RepositoryPipeline],
+ [1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
[2, BulkImports::Common::Pipelines::LabelsPipeline],
[2, BulkImports::Common::Pipelines::MilestonesPipeline],
+ [2, BulkImports::Common::Pipelines::BadgesPipeline],
[3, BulkImports::Projects::Pipelines::IssuesPipeline],
+ [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
+ [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
[4, BulkImports::Common::Pipelines::BoardsPipeline],
[4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
[4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
[4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
+ [4, BulkImports::Projects::Pipelines::CiPipelinesPipeline],
+ [4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
+ [4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
+ [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
[5, BulkImports::Common::Pipelines::WikiPipeline],
[5, BulkImports::Common::Pipelines::UploadsPipeline],
+ [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
+ [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
[6, BulkImports::Common::Pipelines::EntityFinisher]
]
end
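
The fixture above encodes each pipeline as a [stage, class] pair, and the comment notes that any new stage must also be registered in the EE spec. A minimal standalone sketch of how such pairs group into ordered stages (the grouping here is illustrative, not the Stage class's implementation):

    # Group [stage, pipeline] pairs into stage number => pipeline list.
    pipelines = [
      [0, 'ProjectPipeline'],
      [1, 'RepositoryPipeline'],
      [1, 'ProjectAttributesPipeline'],
      [2, 'LabelsPipeline']
    ]

    stages = pipelines.group_by(&:first).transform_values { |pairs| pairs.map(&:last) }
    # => {0=>["ProjectPipeline"], 1=>["RepositoryPipeline", "ProjectAttributesPipeline"], 2=>["LabelsPipeline"]}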
diff --git a/spec/lib/error_tracking/collector/payload_validator_spec.rb b/spec/lib/error_tracking/collector/payload_validator_spec.rb
index 852cf9eac6c..ab5ec448dff 100644
--- a/spec/lib/error_tracking/collector/payload_validator_spec.rb
+++ b/spec/lib/error_tracking/collector/payload_validator_spec.rb
@@ -3,16 +3,18 @@
require 'spec_helper'
RSpec.describe ErrorTracking::Collector::PayloadValidator do
+ let(:validator) { described_class.new }
+
describe '#valid?' do
RSpec.shared_examples 'valid payload' do
- it 'returns true' do
- expect(described_class.new.valid?(payload)).to be_truthy
+ specify do
+ expect(validator).to be_valid(payload)
end
end
RSpec.shared_examples 'invalid payload' do
- it 'returns false' do
- expect(described_class.new.valid?(payload)).to be_falsey
+ specify do
+ expect(validator).not_to be_valid(payload)
end
end
@@ -28,6 +30,12 @@ RSpec.describe ErrorTracking::Collector::PayloadValidator do
it_behaves_like 'valid payload'
end
+ context 'python payload in repl' do
+ let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/python_event_repl.json')) }
+
+ it_behaves_like 'valid payload'
+ end
+
context 'browser payload' do
let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/browser_event.json')) }
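
The shared examples above now use RSpec predicate matchers instead of asserting on raw return values: `be_valid(payload)` calls `valid?(payload)` on the subject and passes when it returns a truthy value. A minimal self-contained sketch of that matcher behaviour, with a toy validator standing in for the real one:

    class TinyValidator
      # Any object exposing a valid?(payload) predicate works with be_valid(payload).
      def valid?(payload)
        payload.is_a?(Hash) && payload.key?('event_id')
      end
    end

    RSpec.describe TinyValidator do
      it 'accepts a payload carrying an event_id' do
        expect(described_class.new).to be_valid('event_id' => 'abc123')
      end

      it 'rejects a payload without one' do
        expect(described_class.new).not_to be_valid({})
      end
    end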
diff --git a/spec/lib/feature/definition_spec.rb b/spec/lib/feature/definition_spec.rb
index 21120012927..2f95f8eeab7 100644
--- a/spec/lib/feature/definition_spec.rb
+++ b/spec/lib/feature/definition_spec.rb
@@ -161,6 +161,41 @@ RSpec.describe Feature::Definition do
end
end
+ describe '.for_upcoming_milestone?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:definition) do
+ Feature::Definition.new("development/enabled_feature_flag.yml",
+ name: :enabled_feature_flag,
+ type: 'development',
+ milestone: milestone,
+ default_enabled: false)
+ end
+
+ before do
+ allow(Feature::Definition).to receive(:definitions) do
+ { definition.key => definition }
+ end
+
+ allow(Gitlab).to receive(:version_info).and_return(Gitlab::VersionInfo.parse(current_milestone))
+ end
+
+ subject { definition.for_upcoming_milestone? }
+
+ where(:ctx, :milestone, :current_milestone, :expected) do
+ 'no milestone' | nil | '1.0.0' | false
+ 'upcoming milestone - major' | '2.3' | '1.9.999' | true
+ 'upcoming milestone - minor' | '2.3' | '2.2.999' | true
+ 'current milestone' | '2.3' | '2.3.999' | true
+ 'past milestone - major' | '1.9' | '2.3.999' | false
+ 'past milestone - minor' | '2.2' | '2.3.999' | false
+ end
+
+ with_them do
+ it { is_expected.to be(expected) }
+ end
+ end
+
describe '.valid_usage!' do
before do
allow(described_class).to receive(:definitions) do
@@ -215,7 +250,42 @@ RSpec.describe Feature::Definition do
end
end
- describe '.defaul_enabled?' do
+ describe '.log_states?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:definition) do
+ Feature::Definition.new("development/enabled_feature_flag.yml",
+ name: :enabled_feature_flag,
+ type: 'development',
+ milestone: milestone,
+ log_state_changes: log_state_change,
+ default_enabled: false)
+ end
+
+ before do
+ allow(Feature::Definition).to receive(:definitions) do
+ { definition.key => definition }
+ end
+
+ allow(Gitlab).to receive(:version_info).and_return(Gitlab::VersionInfo.new(10, 0, 0))
+ end
+
+ subject { Feature::Definition.log_states?(key) }
+
+ where(:ctx, :key, :milestone, :log_state_change, :expected) do
+ 'When flag does not exist' | :no_flag | "0.0" | true | false
+ 'When flag is old, and logging is not forced' | :enabled_feature_flag | "0.0" | false | false
+ 'When flag is old, but logging is forced' | :enabled_feature_flag | "0.0" | true | true
+ 'When flag is current' | :enabled_feature_flag | "10.0" | true | true
+ 'When flag is upcoming' | :enabled_feature_flag | "10.0" | true | true
+ end
+
+ with_them do
+ it { is_expected.to be(expected) }
+ end
+ end
+
+ describe '.default_enabled?' do
subject { described_class.default_enabled?(key) }
context 'when feature flag exist' do
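
Both tables above hinge on comparing a flag's milestone against the running GitLab version. A standalone sketch of the comparison the rows encode, using Gem::Version (the real `for_upcoming_milestone?` may be implemented differently):

    # "Upcoming" per the table: the milestone is the current or a later
    # major.minor version; a flag without a milestone is never upcoming.
    def upcoming_milestone?(milestone, current_version)
      return false if milestone.nil?

      current = current_version.split('.').first(2).join('.')
      Gem::Version.new(milestone) >= Gem::Version.new(current)
    end

    upcoming_milestone?('2.3', '2.2.999') # => true  (upcoming minor)
    upcoming_milestone?('2.3', '2.3.999') # => true  (current milestone)
    upcoming_milestone?('2.2', '2.3.999') # => false (past milestone)
    upcoming_milestone?(nil, '1.0.0')     # => false (no milestone)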
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 58e7292c125..82580d5d700 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -127,6 +127,10 @@ RSpec.describe Feature, stub_feature_flags: false do
end
describe '.enabled?' do
+ before do
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ end
+
it 'returns false for undefined feature' do
expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
end
@@ -179,6 +183,35 @@ RSpec.describe Feature, stub_feature_flags: false do
expect(described_class.enabled?(:a_feature, default_enabled: fake_default)).to eq(fake_default)
end
+ context 'logging is enabled', :request_store do
+ before do
+ allow(Feature).to receive(:log_feature_flag_states?).and_call_original
+
+ definition = Feature::Definition.new("development/enabled_feature_flag.yml",
+ name: :enabled_feature_flag,
+ type: 'development',
+ log_state_changes: true,
+ default_enabled: false)
+
+ allow(Feature::Definition).to receive(:definitions) do
+ { definition.key => definition }
+ end
+
+ described_class.enable(:feature_flag_state_logs)
+ described_class.enable(:enabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
+
+ it 'does not log feature_flag_state_logs' do
+ expect(described_class.logged_states).not_to have_key("feature_flag_state_logs")
+ end
+
+ it 'logs other feature flags' do
+ expect(described_class.logged_states).to have_key(:enabled_feature_flag)
+ expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy
+ end
+ end
+
context 'cached feature flag', :request_store do
let(:flag) { :some_feature_flag }
@@ -491,6 +524,82 @@ RSpec.describe Feature, stub_feature_flags: false do
end
end
+ describe '.log_feature_flag_states?' do
+ let(:log_state_changes) { false }
+ let(:milestone) { "0.0" }
+ let(:flag_name) { :some_flag }
+ let(:definition) do
+ Feature::Definition.new("development/#{flag_name}.yml",
+ name: flag_name,
+ type: 'development',
+ milestone: milestone,
+ log_state_changes: log_state_changes,
+ default_enabled: false)
+ end
+
+ before do
+ Feature.enable(:feature_flag_state_logs)
+ Feature.enable(:some_flag)
+
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original
+ allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original
+
+ allow(Feature::Definition).to receive(:definitions) do
+ { definition.key => definition }
+ end
+ end
+
+ subject { described_class.log_feature_flag_states?(flag_name) }
+
+ context 'when flag is feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
+ let(:flag_name) { :feature_flag_state_logs }
+ let(:log_state_changes) { true }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when flag is old' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when flag is old while log_state_changes is not present' do
+ let(:definition) do
+ Feature::Definition.new("development/#{flag_name}.yml",
+ name: flag_name,
+ type: 'development',
+ milestone: milestone,
+ default_enabled: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when flag is old but log_state_changes is true' do
+ let(:log_state_changes) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when flag is new and not feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when milestone is nil' do
+ let(:definition) do
+ Feature::Definition.new("development/#{flag_name}.yml",
+ name: flag_name,
+ type: 'development',
+ default_enabled: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
let(:actor) { stub_feature_flag_gate('CustomActor:5') }
let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
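
The logging examples above drive the public surface end to end: enable the `feature_flag_state_logs` meta flag and a logged flag, check it, then read `Feature.logged_states`. A short usage sketch limited to the calls that appear in the spec (it assumes a loaded GitLab environment and a flag definition with `log_state_changes: true`):

    # Within one request (:request_store), checked flags are recorded.
    Feature.enable(:feature_flag_state_logs)
    Feature.enable(:enabled_feature_flag)

    Feature.enabled?(:enabled_feature_flag)

    Feature.logged_states
    # => { enabled_feature_flag: true }   # the meta flag itself is not recorded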
diff --git a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
index e497551bc3f..4cba9732c22 100644
--- a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
@@ -28,8 +28,14 @@ RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator, :silence_stdout
weekly_metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*i_test_event_weekly.yml')).first
monthly_metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_28d/*i_test_event_monthly.yml')).first
- expect(YAML.safe_load(File.read(weekly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly")
- expect(YAML.safe_load(File.read(monthly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly")
+ weekly_metric_definition = YAML.safe_load(File.read(weekly_metric_definition_path))
+ monthly_metric_definition = YAML.safe_load(File.read(monthly_metric_definition_path))
+
+ expect(weekly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly")
+ expect(monthly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly")
+
+ expect(weekly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric')
+ expect(monthly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric')
end
context 'with ee option' do
@@ -49,9 +55,11 @@ RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator, :silence_stdout
expect(weekly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly")
expect(weekly_metric_definition["distribution"]).to include('ee')
+ expect(weekly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric')
expect(monthly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly")
expect(monthly_metric_definition["distribution"]).to include('ee')
+ expect(monthly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric')
end
end
end
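
The assertions above read the generated YAML back and check two keys. A standalone sketch of the shape being asserted, restricted to those keys (the full generated file contains more fields than shown here):

    require 'yaml'

    definition = YAML.safe_load(<<~YAML)
      key_path: redis_hll_counters.test_category.i_test_event_weekly
      instrumentation_class: RedisHLLMetric
    YAML

    definition['key_path']              # => "redis_hll_counters.test_category.i_test_event_weekly"
    definition['instrumentation_class'] # => "RedisHLLMetric"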
diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
index b67425ae012..6a30bcd0e2c 100644
--- a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do
let(:key_path) { 'counts_weekly.test_metric' }
let(:dir) { '7d' }
+ let(:class_name) { 'Count' }
let(:temp_dir) { Dir.mktmpdir }
before do
@@ -33,7 +34,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do
let(:metric_definition_path) { Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first }
it 'creates a metric definition file using the template' do
- described_class.new([key_path], { 'dir' => dir }).invoke_all
+ described_class.new([key_path], { 'dir' => dir, 'class_name' => class_name }).invoke_all
expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric)
end
end
@@ -48,14 +49,14 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do
end
it 'creates a metric definition file using the template' do
- described_class.new([key_path], { 'dir' => dir, 'ee': true }).invoke_all
+ described_class.new([key_path], { 'dir' => dir, 'class_name' => class_name, 'ee': true }).invoke_all
expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric)
end
end
end
describe 'Validation' do
- let(:options) { [key_path, '--dir', dir] }
+ let(:options) { [key_path, '--dir', dir, '--class_name', class_name] }
subject { described_class.start(options) }
@@ -93,7 +94,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do
describe 'Name suggestions' do
it 'adds name key to metric definition' do
expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('some name')
- described_class.new([key_path], { 'dir' => dir }).invoke_all
+ described_class.new([key_path], { 'dir' => dir, 'class_name' => class_name }).invoke_all
metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
expect(YAML.safe_load(File.read(metric_definition_path))).to include("name" => "some name")
@@ -104,7 +105,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do
let(:key_paths) { ['counts_weekly.test_metric', 'counts_weekly.test1_metric'] }
it 'creates multiple files' do
- described_class.new(key_paths, { 'dir' => dir }).invoke_all
+ described_class.new(key_paths, { 'dir' => dir, 'class_name' => class_name }).invoke_all
files = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_metric.yml'))
expect(files.count).to eq(2)
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
index 045cdb129cb..55ba6e56237 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
@@ -41,6 +41,19 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
it_behaves_like 'match returned records'
end
+ context 'when intervalstyle setting is configured to "postgres"' do
+ it 'avoids nil durations' do
+ # ActiveRecord cannot parse the 'postgres' intervalstyle; it returns nil instead.
+ # The setting is rolled back after the test case.
+ Analytics::CycleAnalytics::IssueStageEvent.connection.execute("SET LOCAL intervalstyle='postgres'")
+
+ records_fetcher.serialized_records do |relation|
+ durations = relation.map(&:total_time)
+ expect(durations).to all(be > 0)
+ end
+ end
+ end
+
context 'when sorting by end event ASC' do
let(:expected_issue_ids) { [issue_2.iid, issue_1.iid, issue_3.iid] }
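
The comment above relies on SET LOCAL being transaction-scoped, which is why the intervalstyle change cannot leak into other examples. A minimal sketch of that scoping, assuming an ActiveRecord connection to PostgreSQL:

    ActiveRecord::Base.transaction do
      # Applies only inside this transaction.
      ActiveRecord::Base.connection.execute("SET LOCAL intervalstyle='postgres'")
      # ... queries returning interval columns run with the 'postgres' style ...
    end
    # After the transaction ends (commit or rollback), intervalstyle reverts.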
diff --git a/spec/lib/gitlab/anonymous_session_spec.rb b/spec/lib/gitlab/anonymous_session_spec.rb
index 245ca02e91a..64186e9003a 100644
--- a/spec/lib/gitlab/anonymous_session_spec.rb
+++ b/spec/lib/gitlab/anonymous_session_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_sessions do
let(:default_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' }
let(:additional_session_id) { '7919a6f1bb119dd7396fadc38fd18d0d' }
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
it 'adds session id to proper key' do
subject.count_session_ip
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq 1
end
end
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
freeze_time do
subject.count_session_ip
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
expect(redis.ttl("session:lookup:ip:gitlab2:127.0.0.1")).to eq(24.hours.to_i)
end
end
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
subject.count_session_ip
new_anonymous_session.count_session_ip
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq(2)
end
end
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
describe '#stored_sessions' do
it 'returns all anonymous sessions per ip' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2)
end
@@ -54,13 +54,13 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
end
it 'removes obsolete lookup through ip entries' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2)
end
subject.cleanup_session_per_ip_count
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
expect(redis.exists("session:lookup:ip:gitlab2:127.0.0.1")).to eq(false)
end
end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index ecd68caba79..5ecec978017 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -152,6 +152,38 @@ RSpec.describe Gitlab::ApplicationContext do
end
end
end
+
+ context 'when using a runner project' do
+ let_it_be_with_reload(:runner) { create(:ci_runner, :project) }
+
+ it 'sets project path from runner project' do
+ context = described_class.new(runner: runner)
+
+ expect(result(context)).to include(project: runner.runner_projects.first.project.full_path)
+ end
+
+ context 'when the runner serves multiple projects' do
+ before do
+ create(:ci_runner_project, runner: runner, project: create(:project))
+ end
+
+ it 'does not set project path' do
+ context = described_class.new(runner: runner)
+
+ expect(result(context)).to include(project: nil)
+ end
+ end
+ end
+
+ context 'when using an instance runner' do
+ let_it_be(:runner) { create(:ci_runner, :instance) }
+
+ it 'does not set project path' do
+ context = described_class.new(runner: runner)
+
+ expect(result(context)).to include(project: nil)
+ end
+ end
end
describe '#use' do
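
The runner examples above expect a project path only when the runner serves exactly one project, and nil otherwise. A small sketch of that rule, built from the associations the spec itself touches (this is not the ApplicationContext implementation):

    def runner_project_path(runner)
      runner_projects = runner.runner_projects
      return unless runner_projects.one?

      runner_projects.first.project.full_path
    end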
diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb b/spec/lib/gitlab/application_rate_limiter_spec.rb
index c74bcf8d678..20c89eab5f5 100644
--- a/spec/lib/gitlab/application_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/application_rate_limiter_spec.rb
@@ -2,37 +2,37 @@
require 'spec_helper'
-RSpec.describe Gitlab::ApplicationRateLimiter do
+RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
- subject { described_class }
-
- describe '.throttled?', :clean_gitlab_redis_rate_limiting do
- let(:rate_limits) do
- {
- test_action: {
- threshold: 1,
- interval: 2.minutes
- },
- another_action: {
- threshold: 2,
- interval: 3.minutes
- }
+ let(:rate_limits) do
+ {
+ test_action: {
+ threshold: 1,
+ interval: 2.minutes
+ },
+ another_action: {
+ threshold: 2,
+ interval: 3.minutes
}
- end
+ }
+ end
- before do
- allow(described_class).to receive(:rate_limits).and_return(rate_limits)
- end
+ subject { described_class }
+
+ before do
+ allow(described_class).to receive(:rate_limits).and_return(rate_limits)
+ end
+ describe '.throttled?' do
context 'when the key is invalid' do
context 'is provided as a Symbol' do
context 'but is not defined in the rate_limits Hash' do
it 'raises an InvalidKeyError exception' do
key = :key_not_in_rate_limits_hash
- expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError)
+ expect { subject.throttled?(key, scope: [user]) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError)
end
end
end
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter do
it 'raises an InvalidKeyError exception' do
key = rate_limits.keys[0].to_s
- expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError)
+ expect { subject.throttled?(key, scope: [user]) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError)
end
end
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter do
it 'raises an InvalidKeyError exception' do
key = 'key_not_in_rate_limits_hash'
- expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError)
+ expect { subject.throttled?(key, scope: [user]) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError)
end
end
end
@@ -89,6 +89,17 @@ RSpec.describe Gitlab::ApplicationRateLimiter do
expect(subject.throttled?(:another_action, scope: scope)).to eq(true)
end
end
+
+ it 'allows peeking at the current state without changing its value' do
+ travel_to(start_time) do
+ expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+ 2.times do
+ expect(subject.throttled?(:test_action, scope: scope, peek: true)).to eq(false)
+ end
+ expect(subject.throttled?(:test_action, scope: scope)).to eq(true)
+ expect(subject.throttled?(:test_action, scope: scope, peek: true)).to eq(true)
+ end
+ end
end
context 'when using ActiveRecord models as scope' do
@@ -104,6 +115,20 @@ RSpec.describe Gitlab::ApplicationRateLimiter do
end
end
+ describe '.peek' do
+ it 'peeks at the current state without changing its value' do
+ freeze_time do
+ expect(subject.peek(:test_action, scope: [user])).to eq(false)
+ expect(subject.throttled?(:test_action, scope: [user])).to eq(false)
+ 2.times do
+ expect(subject.peek(:test_action, scope: [user])).to eq(false)
+ end
+ expect(subject.throttled?(:test_action, scope: [user])).to eq(true)
+ expect(subject.peek(:test_action, scope: [user])).to eq(true)
+ end
+ end
+ end
+
describe '.log_request' do
let(:file_path) { 'master/README.md' }
let(:type) { :raw_blob_request_limit }
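
The new examples show two read-only ways to inspect a limit: `throttled?(..., peek: true)` and `.peek`. A usage sketch with a threshold of 1, using only the API surface exercised above (the rate-limit configuration and scope are assumed to be set up as in the spec):

    scope = [user]

    Gitlab::ApplicationRateLimiter.throttled?(:test_action, scope: scope)             # => false, consumes the only allowed hit
    Gitlab::ApplicationRateLimiter.peek(:test_action, scope: scope)                   # => false, does not consume anything
    Gitlab::ApplicationRateLimiter.throttled?(:test_action, scope: scope)             # => true, limit exceeded
    Gitlab::ApplicationRateLimiter.throttled?(:test_action, scope: scope, peek: true) # => true, read-only check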
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index ac29bb22865..7200ff3c4db 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -96,10 +96,10 @@ module Gitlab
it "does not convert dangerous fenced code with inline script into HTML" do
input = '```mypre"><script>alert(3)</script>'
output =
- if Feature.enabled?(:use_cmark_renderer)
- "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n</div>\n</div>"
+ if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
+ "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
else
- "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"&gt;</span></code></pre>\n</div>\n</div>"
+ "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"&gt;</span></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
end
expect(render(input, context)).to include(output)
@@ -365,7 +365,10 @@ module Gitlab
output = <<~HTML
<div>
<div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
<pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
</div>
</div>
HTML
@@ -392,11 +395,14 @@ module Gitlab
<div>
<div>class.cpp</div>
<div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
<pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
<span id="LC2" class="line" lang="cpp"></span>
<span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
<span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
<span id="LC5" class="line" lang="cpp"><span class="p">}</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
</div>
</div>
HTML
diff --git a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
index 102d6fba97f..e5bc51edc2d 100644
--- a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
+++ b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe Gitlab::Auth::UserAccessDeniedReason do
it { is_expected.to match /must accept the Terms of Service/ }
it { is_expected.to include(user.username) }
+ it { is_expected.to include(Gitlab.config.gitlab.url) }
end
context 'when the user is internal' do
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 5ec6e23774a..32e647688ff 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -259,30 +259,48 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'while using OAuth tokens as passwords' do
let(:user) { create(:user) }
- let(:token_w_api_scope) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') }
let(:application) { Doorkeeper::Application.create!(name: 'MyApp', redirect_uri: 'https://app.com', owner: user) }
shared_examples 'an oauth failure' do
it 'fails' do
- expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip'))
+ access_token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api')
+
+ expect(gl_auth.find_for_git_client("oauth2", access_token.token, project: nil, ip: 'ip'))
.to have_attributes(auth_failure)
end
end
- it 'succeeds for OAuth tokens with the `api` scope' do
- expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :oauth, authentication_abilities: described_class.full_authentication_abilities)
- end
+ context 'with specified scopes' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:scopes, :abilities) do
+ 'api' | described_class.full_authentication_abilities
+ 'read_api' | described_class.read_only_authentication_abilities
+ 'read_repository' | [:download_code]
+ 'write_repository' | [:download_code, :push_code]
+ 'read_user' | []
+ 'sudo' | []
+ 'openid' | []
+ 'profile' | []
+ 'email' | []
+ end
- it 'fails for OAuth tokens with other scopes' do
- token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'read_user')
+ with_them do
+ it 'authenticates with correct abilities' do
+ access_token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: scopes)
- expect(gl_auth.find_for_git_client("oauth2", token.token, project: nil, ip: 'ip')).to have_attributes(auth_failure)
+ expect(gl_auth.find_for_git_client("oauth2", access_token.token, project: nil, ip: 'ip'))
+ .to have_attributes(actor: user, project: nil, type: :oauth, authentication_abilities: abilities)
+ end
+ end
end
it 'does not try password auth before oauth' do
+ access_token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api')
+
expect(gl_auth).not_to receive(:find_with_user_password)
- gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')
+ gl_auth.find_for_git_client("oauth2", access_token.token, project: nil, ip: 'ip')
end
context 'blocked user' do
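
The table above pairs each OAuth scope with the git abilities it grants. A short sketch of the flow a single row exercises, reusing the spec's own names (`user`, `application`, `gl_auth`):

    access_token = Doorkeeper::AccessToken.create!(
      application_id: application.id,
      resource_owner_id: user.id,
      scopes: 'read_repository'
    )

    result = gl_auth.find_for_git_client('oauth2', access_token.token, project: nil, ip: 'ip')
    result.authentication_abilities # => [:download_code], per the table above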
diff --git a/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb b/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb
deleted file mode 100644
index 1e72b249c19..00000000000
--- a/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount do
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:merge_requests_table) { table(:merge_requests) }
- let(:merge_request_diffs_table) { table(:merge_request_diffs) }
- let(:merge_request_diff_commits_table) { table(:merge_request_diff_commits) }
-
- let(:namespace) { namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') }
- let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id) }
- let(:merge_request) do
- merge_requests_table.create!(target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: 'mr name')
- end
-
- def create_diff!(name, commits: 0)
- mr_diff = merge_request_diffs_table.create!(
- merge_request_id: merge_request.id)
-
- commits.times do |i|
- merge_request_diff_commits_table.create!(
- merge_request_diff_id: mr_diff.id,
- relative_order: i, sha: i)
- end
-
- mr_diff
- end
-
- describe '#perform' do
- it 'migrates diffs that have no commits' do
- diff = create_diff!('with_multiple_commits', commits: 0)
-
- subject.perform(diff.id, diff.id)
-
- expect(diff.reload.commits_count).to eq(0)
- end
-
- it 'skips diffs that have commits_count already set' do
- timestamp = 2.days.ago
- diff = merge_request_diffs_table.create!(
- merge_request_id: merge_request.id,
- commits_count: 0,
- updated_at: timestamp)
-
- subject.perform(diff.id, diff.id)
-
- expect(diff.reload.updated_at).to be_within(1.second).of(timestamp)
- end
-
- it 'migrates multiple diffs to the correct values' do
- diffs = Array.new(3).map.with_index { |_, i| create_diff!(i, commits: 3) }
-
- subject.perform(diffs.first.id, diffs.last.id)
-
- diffs.each do |diff|
- expect(diff.reload.commits_count).to eq(3)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb b/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb
deleted file mode 100644
index 0b29163671c..00000000000
--- a/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::AddModifiedToApprovalMergeRequestRule, schema: 20181228175414 do
- let(:determine_if_rules_are_modified) { described_class.new }
-
- let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') }
- let(:projects) { table(:projects) }
- let(:normal_project) { projects.create!(namespace_id: namespace.id) }
- let(:overridden_project) { projects.create!(namespace_id: namespace.id) }
- let(:rules) { table(:approval_merge_request_rules) }
- let(:project_rules) { table(:approval_project_rules) }
- let(:sources) { table(:approval_merge_request_rule_sources) }
- let(:merge_requests) { table(:merge_requests) }
- let(:groups) { table(:namespaces) }
- let(:mr_groups) { table(:approval_merge_request_rules_groups) }
- let(:project_groups) { table(:approval_project_rules_groups) }
-
- before do
- project_rule = project_rules.create!(project_id: normal_project.id, approvals_required: 3, name: 'test rule')
- overridden_project_rule = project_rules.create!(project_id: overridden_project.id, approvals_required: 5, name: 'other test rule')
- overridden_project_rule_two = project_rules.create!(project_id: overridden_project.id, approvals_required: 7, name: 'super cool rule')
-
- merge_request = merge_requests.create!(target_branch: 'feature', source_branch: 'default', source_project_id: normal_project.id, target_project_id: normal_project.id)
- overridden_merge_request = merge_requests.create!(target_branch: 'feature-2', source_branch: 'default', source_project_id: overridden_project.id, target_project_id: overridden_project.id)
-
- merge_rule = rules.create!(merge_request_id: merge_request.id, approvals_required: 3, name: 'test rule')
- overridden_merge_rule = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 6, name: 'other test rule')
- overridden_merge_rule_two = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 7, name: 'super cool rule')
-
- sources.create!(approval_project_rule_id: project_rule.id, approval_merge_request_rule_id: merge_rule.id)
- sources.create!(approval_project_rule_id: overridden_project_rule.id, approval_merge_request_rule_id: overridden_merge_rule.id)
- sources.create!(approval_project_rule_id: overridden_project_rule_two.id, approval_merge_request_rule_id: overridden_merge_rule_two.id)
-
- group1 = groups.create!(name: "group1", path: "test_group1", type: 'Group')
- group2 = groups.create!(name: "group2", path: "test_group2", type: 'Group')
- group3 = groups.create!(name: "group3", path: "test_group3", type: 'Group')
-
- project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group1.id)
- project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group2.id)
- project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group3.id)
-
- mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule.id, group_id: group1.id)
- mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule_two.id, group_id: group2.id)
- end
-
- describe '#perform' do
- it 'changes the correct rules' do
- original_count = rules.all.count
-
- determine_if_rules_are_modified.perform(rules.minimum(:id), rules.maximum(:id))
-
- results = rules.where(modified_from_project_rule: true)
-
- expect(results.count).to eq 2
- expect(results.collect(&:name)).to eq(['other test rule', 'super cool rule'])
- expect(rules.count).to eq original_count
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb b/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb
deleted file mode 100644
index 1404ada3647..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments, :migration, schema: 20181228175414 do
- subject { described_class.new }
-
- describe '#perform' do
- it 'backfills deployment_cluster for all deployments in the given range with a non-null cluster_id' do
- deployment_clusters = table(:deployment_clusters)
-
- namespace = table(:namespaces).create!(name: 'the-namespace', path: 'the-path')
- project = table(:projects).create!(name: 'the-project', namespace_id: namespace.id)
- environment = table(:environments).create!(name: 'the-environment', project_id: project.id, slug: 'slug')
- cluster = table(:clusters).create!(name: 'the-cluster')
-
- deployment_data = { cluster_id: cluster.id, project_id: project.id, environment_id: environment.id, ref: 'abc', tag: false, sha: 'sha', status: 1 }
- expected_deployment_1 = create_deployment(**deployment_data)
- create_deployment(**deployment_data, cluster_id: nil) # no cluster_id
- expected_deployment_2 = create_deployment(**deployment_data)
- out_of_range_deployment = create_deployment(**deployment_data, cluster_id: cluster.id) # expected to be out of range
-
- # to test "ON CONFLICT DO NOTHING"
- existing_record_for_deployment_2 = deployment_clusters.create!(
- deployment_id: expected_deployment_2.id,
- cluster_id: expected_deployment_2.cluster_id,
- kubernetes_namespace: 'production'
- )
-
- subject.perform(expected_deployment_1.id, out_of_range_deployment.id - 1)
-
- expect(deployment_clusters.all.pluck(:deployment_id, :cluster_id, :kubernetes_namespace)).to contain_exactly(
- [expected_deployment_1.id, cluster.id, nil],
- [expected_deployment_2.id, cluster.id, existing_record_for_deployment_2.kubernetes_namespace]
- )
- end
-
- def create_deployment(**data)
- @iid ||= 0
- @iid += 1
- table(:deployments).create!(iid: @iid, **data)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb
deleted file mode 100644
index 9194525e713..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillEnvironmentIdDeploymentMergeRequests, schema: 20181228175414 do
- let(:environments) { table(:environments) }
- let(:merge_requests) { table(:merge_requests) }
- let(:deployments) { table(:deployments) }
- let(:deployment_merge_requests) { table(:deployment_merge_requests) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- subject(:migration) { described_class.new }
-
- it 'correctly backfills environment_id column' do
- namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
-
- production = environments.create!(project_id: project.id, name: 'production', slug: 'production')
- staging = environments.create!(project_id: project.id, name: 'staging', slug: 'staging')
-
- mr = merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id)
-
- deployment1 = deployments.create!(environment_id: staging.id, iid: 1, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1)
- deployment2 = deployments.create!(environment_id: production.id, iid: 2, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1)
- deployment3 = deployments.create!(environment_id: production.id, iid: 3, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1)
-
- # mr is tracked twice in production through deployment2 and deployment3
- deployment_merge_requests.create!(deployment_id: deployment1.id, merge_request_id: mr.id)
- deployment_merge_requests.create!(deployment_id: deployment2.id, merge_request_id: mr.id)
- deployment_merge_requests.create!(deployment_id: deployment3.id, merge_request_id: mr.id)
-
- expect(deployment_merge_requests.where(environment_id: nil).count).to eq(3)
-
- migration.backfill_range(1, mr.id)
-
- expect(deployment_merge_requests.where(environment_id: nil).count).to be_zero
- expect(deployment_merge_requests.count).to eq(2)
-
- production_deployments = deployment_merge_requests.where(environment_id: production.id)
- expect(production_deployments.count).to eq(1)
- expect(production_deployments.first.deployment_id).to eq(deployment2.id)
-
- expect(deployment_merge_requests.where(environment_id: staging.id).count).to eq(1)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb
deleted file mode 100644
index d33f52514da..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20181228175414 do
- let(:merge_requests) { table(:merge_requests) }
- let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) }
- let(:metrics) { table(:merge_request_metrics) }
-
- let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
- let(:project) { table(:projects).create!(namespace_id: namespace.id) }
-
- subject { described_class.new }
-
- describe '#perform' do
- let!(:open_mr) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master') }
-
- let!(:closed_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
- let!(:closed_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
- let!(:closed_mr_1_metrics) { metrics.create!(merge_request_id: closed_mr_1.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
- let!(:closed_mr_2_metrics) { metrics.create!(merge_request_id: closed_mr_2.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
- let!(:closed_mr_2_cleanup_schedule) { cleanup_schedules.create!(merge_request_id: closed_mr_2.id, scheduled_at: Time.current) }
-
- let!(:merged_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) }
- let!(:merged_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3, updated_at: Time.current) }
- let!(:merged_mr_1_metrics) { metrics.create!(merge_request_id: merged_mr_1.id, target_project_id: project.id, merged_at: Time.current, created_at: Time.current, updated_at: Time.current) }
-
- let!(:closed_mr_3) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
- let!(:closed_mr_3_metrics) { metrics.create!(merge_request_id: closed_mr_3.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
-
- it 'creates records for all closed and merged merge requests in range' do
- expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with(
- message: 'Backfilled merge_request_cleanup_schedules records',
- count: 3
- )
-
- subject.perform(open_mr.id, merged_mr_2.id)
-
- aggregate_failures do
- expect(cleanup_schedules.all.pluck(:merge_request_id))
- .to contain_exactly(closed_mr_1.id, closed_mr_2.id, merged_mr_1.id, merged_mr_2.id)
- expect(cleanup_schedules.find_by(merge_request_id: closed_mr_1.id).scheduled_at.to_s)
- .to eq((closed_mr_1_metrics.latest_closed_at + 14.days).to_s)
- expect(cleanup_schedules.find_by(merge_request_id: closed_mr_2.id).scheduled_at.to_s)
- .to eq(closed_mr_2_cleanup_schedule.scheduled_at.to_s)
- expect(cleanup_schedules.find_by(merge_request_id: merged_mr_1.id).scheduled_at.to_s)
- .to eq((merged_mr_1_metrics.merged_at + 14.days).to_s)
- expect(cleanup_schedules.find_by(merge_request_id: merged_mr_2.id).scheduled_at.to_s)
- .to eq((merged_mr_2.updated_at + 14.days).to_s)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb
deleted file mode 100644
index 0f8adca2ca4..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceSettings, schema: 20181228175414 do
- let(:namespaces) { table(:namespaces) }
- let(:namespace_settings) { table(:namespace_settings) }
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
-
- subject { described_class.new }
-
- describe '#perform' do
- it 'creates settings for all projects in range' do
- namespaces.create!(id: 5, name: 'test1', path: 'test1')
- namespaces.create!(id: 7, name: 'test2', path: 'test2')
- namespaces.create!(id: 8, name: 'test3', path: 'test3')
-
- subject.perform(5, 7)
-
- expect(namespace_settings.all.pluck(:namespace_id)).to contain_exactly(5, 7)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
deleted file mode 100644
index e6b0db2ab73..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectSettings, schema: 20181228175414 do
- let(:projects) { table(:projects) }
- let(:project_settings) { table(:project_settings) }
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:project) { projects.create!(namespace_id: namespace.id) }
-
- subject { described_class.new }
-
- describe '#perform' do
- it 'creates settings for all projects in range' do
- projects.create!(id: 5, namespace_id: namespace.id)
- projects.create!(id: 7, namespace_id: namespace.id)
- projects.create!(id: 8, namespace_id: namespace.id)
-
- subject.perform(5, 7)
-
- expect(project_settings.all.pluck(:project_id)).to contain_exactly(5, 7)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb
deleted file mode 100644
index 3468df3dccd..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects, :migration, schema: 20181228175414 do
- let(:push_rules) { table(:push_rules) }
- let(:projects) { table(:projects) }
- let(:project_settings) { table(:project_settings) }
- let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
-
- subject { described_class.new }
-
- describe '#perform' do
- it 'creates new project push_rules for all push rules in the range' do
- project_1 = projects.create!(id: 1, namespace_id: namespace.id)
- project_2 = projects.create!(id: 2, namespace_id: namespace.id)
- project_3 = projects.create!(id: 3, namespace_id: namespace.id)
- project_settings_1 = project_settings.create!(project_id: project_1.id)
- project_settings_2 = project_settings.create!(project_id: project_2.id)
- project_settings_3 = project_settings.create!(project_id: project_3.id)
- push_rule_1 = push_rules.create!(id: 5, is_sample: false, project_id: project_1.id)
- push_rule_2 = push_rules.create!(id: 6, is_sample: false, project_id: project_2.id)
- push_rules.create!(id: 8, is_sample: false, project_id: 3)
-
- subject.perform(5, 7)
-
- expect(project_settings_1.reload.push_rule_id).to eq(push_rule_1.id)
- expect(project_settings_2.reload.push_rule_id).to eq(push_rule_2.id)
- expect(project_settings_3.reload.push_rule_id).to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb
new file mode 100644
index 00000000000..7cc64889fc8
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidSecurityFindings, schema: 20211108211434 do
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id) }
+
+ let(:pipelines) { table(:ci_pipelines) }
+ let!(:pipeline) { pipelines.create!(project_id: project.id) }
+
+ let(:ci_builds) { table(:ci_builds) }
+ let!(:ci_build) { ci_builds.create! }
+
+ let(:security_scans) { table(:security_scans) }
+ let!(:security_scan) do
+ security_scans.create!(
+ scan_type: 1,
+ status: 1,
+ build_id: ci_build.id,
+ project_id: project.id,
+ pipeline_id: pipeline.id
+ )
+ end
+
+ let(:vulnerability_scanners) { table(:vulnerability_scanners) }
+ let!(:vulnerability_scanner) { vulnerability_scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+
+ let(:security_findings) { table(:security_findings) }
+ let!(:security_finding_without_uuid) do
+ security_findings.create!(
+ severity: 1,
+ confidence: 1,
+ scan_id: security_scan.id,
+ scanner_id: vulnerability_scanner.id,
+ uuid: nil
+ )
+ end
+
+ let!(:security_finding_with_uuid) do
+ security_findings.create!(
+ severity: 1,
+ confidence: 1,
+ scan_id: security_scan.id,
+ scanner_id: vulnerability_scanner.id,
+ uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e'
+ )
+ end
+
+ let(:sub_batch_size) { 10_000 }
+
+ subject { described_class.new.perform(security_finding_without_uuid.id, security_finding_with_uuid.id, sub_batch_size) }
+
+ it 'drops Security::Finding objects with no UUID' do
+ expect { subject }.to change(security_findings, :count).from(2).to(1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb b/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
deleted file mode 100644
index 35ec8be691a..00000000000
--- a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixPromotedEpicsDiscussionIds, schema: 20181228175414 do
- let(:namespaces) { table(:namespaces) }
- let(:users) { table(:users) }
- let(:epics) { table(:epics) }
- let(:notes) { table(:notes) }
-
- let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:epic1) { epics.create!(id: 1, author_id: user.id, iid: 1, group_id: namespace.id, title: 'Epic with discussion', title_html: 'Epic with discussion') }
-
- def create_note(discussion_id)
- notes.create!(note: 'note comment',
- noteable_id: epic1.id,
- noteable_type: 'Epic',
- discussion_id: discussion_id)
- end
-
- def expect_valid_discussion_id(id)
- expect(id).to match(/\A\h{40}\z/)
- end
-
- describe '#perform with batch of discussion ids' do
- it 'updates discussion ids' do
- note1 = create_note('00000000')
- note2 = create_note('00000000')
- note3 = create_note('10000000')
-
- subject.perform(%w(00000000 10000000))
-
- expect_valid_discussion_id(note1.reload.discussion_id)
- expect_valid_discussion_id(note2.reload.discussion_id)
- expect_valid_discussion_id(note3.reload.discussion_id)
- expect(note1.discussion_id).to eq(note2.discussion_id)
- expect(note1.discussion_id).not_to eq(note3.discussion_id)
- end
-
- it 'skips notes with discussion id not in range' do
- note4 = create_note('20000000')
-
- subject.perform(%w(00000000 10000000))
-
- expect(note4.reload.discussion_id).to eq('20000000')
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
deleted file mode 100644
index 95509f9b897..00000000000
--- a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
+++ /dev/null
@@ -1,104 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixUserNamespaceNames, schema: 20181228175414 do
- let(:namespaces) { table(:namespaces) }
- let(:users) { table(:users) }
- let(:user) { users.create!(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
-
- context 'updating the namespace names' do
- it 'updates a user namespace within range' do
- user2 = users.create!(name: "Other user's full name", projects_limit: 10, username: 'also-not-null', email: '2')
- user_namespace1 = namespaces.create!(
- id: 2,
- owner_id: user.id,
- name: "Should be the user's name",
- path: user.username
- )
- user_namespace2 = namespaces.create!(
- id: 3,
- owner_id: user2.id,
- name: "Should also be the user's name",
- path: user.username
- )
-
- described_class.new.perform(1, 5)
-
- expect(user_namespace1.reload.name).to eq("The user's full name")
- expect(user_namespace2.reload.name).to eq("Other user's full name")
- end
-
- it 'does not update namespaces out of range' do
- user_namespace = namespaces.create!(
- id: 6,
- owner_id: user.id,
- name: "Should be the user's name",
- path: user.username
- )
-
- expect { described_class.new.perform(1, 5) }
- .not_to change { user_namespace.reload.name }
- end
-
- it 'does not update groups owned by the users' do
- user_group = namespaces.create!(
- id: 2,
- owner_id: user.id,
- name: 'A group name',
- path: 'the-path',
- type: 'Group'
- )
-
- expect { described_class.new.perform(1, 5) }
- .not_to change { user_group.reload.name }
- end
- end
-
- context 'namespace route names' do
- let(:routes) { table(:routes) }
- let(:namespace) do
- namespaces.create!(
- id: 2,
- owner_id: user.id,
- name: "Will be updated to the user's name",
- path: user.username
- )
- end
-
- it "updates the route name if it didn't match the namespace" do
- route = routes.create!(path: namespace.path, name: 'Incorrect name', source_type: 'Namespace', source_id: namespace.id)
-
- described_class.new.perform(1, 5)
-
- expect(route.reload.name).to eq("The user's full name")
- end
-
- it 'updates the route name if it was nil match the namespace' do
- route = routes.create!(path: namespace.path, name: nil, source_type: 'Namespace', source_id: namespace.id)
-
- described_class.new.perform(1, 5)
-
- expect(route.reload.name).to eq("The user's full name")
- end
-
- it "doesn't update group routes" do
- route = routes.create!(path: 'group-path', name: 'Group name', source_type: 'Group', source_id: namespace.id)
-
- expect { described_class.new.perform(1, 5) }
- .not_to change { route.reload.name }
- end
-
- it "doesn't touch routes for namespaces out of range" do
- user_namespace = namespaces.create!(
- id: 6,
- owner_id: user.id,
- name: "Should be the user's name",
- path: user.username
- )
-
- expect { described_class.new.perform(1, 5) }
- .not_to change { user_namespace.reload.name }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
deleted file mode 100644
index b4444df674e..00000000000
--- a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
+++ /dev/null
@@ -1,98 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, schema: 20181228175414 do
- let(:namespaces) { table(:namespaces) }
- let(:users) { table(:users) }
- let(:routes) { table(:routes) }
- let(:projects) { table(:projects) }
-
- let(:user) { users.create!(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
-
- let(:namespace) do
- namespaces.create!(
- owner_id: user.id,
- name: "Should eventually be the user's name",
- path: user.username
- )
- end
-
- let(:project) do
- projects.create!(namespace_id: namespace.id, name: 'Project Name')
- end
-
- it "updates the route for a project if it did not match the user's name" do
- route = routes.create!(
- id: 1,
- path: "#{user.username}/#{project.path}",
- source_id: project.id,
- source_type: 'Project',
- name: 'Completely wrong'
- )
-
- described_class.new.perform(1, 5)
-
- expect(route.reload.name).to eq("The user's full name / Project Name")
- end
-
- it 'updates the route for a project if the name was nil' do
- route = routes.create!(
- id: 1,
- path: "#{user.username}/#{project.path}",
- source_id: project.id,
- source_type: 'Project',
- name: nil
- )
-
- described_class.new.perform(1, 5)
-
- expect(route.reload.name).to eq("The user's full name / Project Name")
- end
-
- it 'does not update routes that are out of the range' do
- route = routes.create!(
- id: 6,
- path: "#{user.username}/#{project.path}",
- source_id: project.id,
- source_type: 'Project',
- name: 'Completely wrong'
- )
-
- expect { described_class.new.perform(1, 5) }
- .not_to change { route.reload.name }
- end
-
- it 'does not update routes for projects in groups owned by the user' do
- group = namespaces.create!(
- owner_id: user.id,
- name: 'A group',
- path: 'a-path',
- type: ''
- )
- project = projects.create!(namespace_id: group.id, name: 'Project Name')
- route = routes.create!(
- id: 1,
- path: "#{group.path}/#{project.path}",
- source_id: project.id,
- source_type: 'Project',
- name: 'Completely wrong'
- )
-
- expect { described_class.new.perform(1, 5) }
- .not_to change { route.reload.name }
- end
-
- it 'does not update routes for namespaces' do
- route = routes.create!(
- id: 1,
- path: namespace.path,
- source_id: namespace.id,
- source_type: 'Namespace',
- name: 'Completely wrong'
- )
-
- expect { described_class.new.perform(1, 5) }
- .not_to change { route.reload.name }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
index a0543ca9958..7a524d1489a 100644
--- a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
+++ b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
@@ -3,32 +3,22 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
- let(:database) { :main }
let(:worker_class) { BackgroundMigrationWorker }
- let(:coordinator) { described_class.new(database, worker_class) }
+ let(:tracking_database) { worker_class.tracking_database }
+ let(:coordinator) { described_class.new(worker_class) }
- describe '.for_database' do
+ describe '.for_tracking_database' do
it 'returns an executor with the correct worker class and database' do
- coordinator = described_class.for_database(database)
+ coordinator = described_class.for_tracking_database(tracking_database)
- expect(coordinator.database).to eq(database)
expect(coordinator.worker_class).to eq(worker_class)
end
- context 'when passed in as a string' do
- it 'returns an executor with the correct worker class and database' do
- coordinator = described_class.for_database(database.to_s)
-
- expect(coordinator.database).to eq(database)
- expect(coordinator.worker_class).to eq(worker_class)
- end
- end
-
context 'when an invalid value is given' do
it 'raises an error' do
expect do
- described_class.for_database('notvalid')
- end.to raise_error(ArgumentError, "database must be one of [main], got 'notvalid'")
+ described_class.for_tracking_database('notvalid')
+ end.to raise_error(ArgumentError, /tracking_database must be one of/)
end
end
end
diff --git a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb
deleted file mode 100644
index 64e8afedf52..00000000000
--- a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::LinkLfsObjectsProjects, :migration, schema: 20181228175414 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:fork_networks) { table(:fork_networks) }
- let(:fork_network_members) { table(:fork_network_members) }
- let(:lfs_objects) { table(:lfs_objects) }
- let(:lfs_objects_projects) { table(:lfs_objects_projects) }
-
- let(:namespace) { namespaces.create!(name: 'GitLab', path: 'gitlab') }
-
- let(:fork_network) { fork_networks.create!(root_project_id: source_project.id) }
- let(:another_fork_network) { fork_networks.create!(root_project_id: another_source_project.id) }
-
- let(:source_project) { projects.create!(namespace_id: namespace.id) }
- let(:another_source_project) { projects.create!(namespace_id: namespace.id) }
- let(:project) { projects.create!(namespace_id: namespace.id) }
- let(:another_project) { projects.create!(namespace_id: namespace.id) }
- let(:partially_linked_project) { projects.create!(namespace_id: namespace.id) }
- let(:fully_linked_project) { projects.create!(namespace_id: namespace.id) }
-
- let(:lfs_object) { lfs_objects.create!(oid: 'abc123', size: 100) }
- let(:another_lfs_object) { lfs_objects.create!(oid: 'def456', size: 200) }
-
- let!(:source_project_lop_1) do
- lfs_objects_projects.create!(
- lfs_object_id: lfs_object.id,
- project_id: source_project.id
- )
- end
-
- let!(:source_project_lop_2) do
- lfs_objects_projects.create!(
- lfs_object_id: another_lfs_object.id,
- project_id: source_project.id
- )
- end
-
- let!(:another_source_project_lop_1) do
- lfs_objects_projects.create!(
- lfs_object_id: lfs_object.id,
- project_id: another_source_project.id
- )
- end
-
- let!(:another_source_project_lop_2) do
- lfs_objects_projects.create!(
- lfs_object_id: another_lfs_object.id,
- project_id: another_source_project.id
- )
- end
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 2)
-
- # Create links between projects
- fork_network_members.create!(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
-
- [project, partially_linked_project, fully_linked_project].each do |p|
- fork_network_members.create!(
- fork_network_id: fork_network.id,
- project_id: p.id,
- forked_from_project_id: fork_network.root_project_id
- )
- end
-
- fork_network_members.create!(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
- fork_network_members.create!(fork_network_id: another_fork_network.id, project_id: another_project.id, forked_from_project_id: another_fork_network.root_project_id)
-
- # Links LFS objects to some projects
- lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: fully_linked_project.id)
- lfs_objects_projects.create!(lfs_object_id: another_lfs_object.id, project_id: fully_linked_project.id)
- lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: partially_linked_project.id)
- end
-
- context 'when there are LFS objects to be linked' do
- it 'creates LfsObjectsProject records for forks based on the specified range of LfsObjectProject id' do
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
- expect(logger).to receive(:info).exactly(4).times
- end
-
- expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.to change { lfs_objects_projects.count }.by(5)
-
- expect(lfs_object_ids_for(project)).to match_array(lfs_object_ids_for(source_project))
- expect(lfs_object_ids_for(another_project)).to match_array(lfs_object_ids_for(another_source_project))
- expect(lfs_object_ids_for(partially_linked_project)).to match_array(lfs_object_ids_for(source_project))
-
- expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.not_to change { lfs_objects_projects.count }
- end
- end
-
- context 'when there are no LFS objects to be linked' do
- before do
- # Links LFS objects to all projects
- projects.all.each do |p|
- lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: p.id)
- lfs_objects_projects.create!(lfs_object_id: another_lfs_object.id, project_id: p.id)
- end
- end
-
- it 'does not create LfsObjectProject records' do
- expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }
- .not_to change { lfs_objects_projects.count }
- end
- end
-
- def lfs_object_ids_for(project)
- lfs_objects_projects.where(project_id: project.id).pluck(:lfs_object_id)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb
deleted file mode 100644
index 4287d6723cf..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, schema: 20181228175414 do
- subject(:fingerprint_migrator) { described_class.new }
-
- let(:key_table) { table(:keys) }
-
- before do
- generate_fingerprints!
- end
-
- it 'correctly creates a sha256 fingerprint for a key' do
- key_1 = Key.find(1017)
- key_2 = Key.find(1027)
-
- expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1')
- expect(key_1.fingerprint_sha256).to eq(nil)
-
- expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4')
- expect(key_2.fingerprint_sha256).to eq(nil)
-
- query_count = ActiveRecord::QueryRecorder.new do
- fingerprint_migrator.perform(1, 10000)
- end.count
-
- expect(query_count).to eq(8)
-
- key_1.reload
- key_2.reload
-
- expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1')
- expect(key_1.fingerprint_sha256).to eq('nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg')
-
- expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4')
- expect(key_2.fingerprint_sha256).to eq('zMNbLekgdjtcgDv8VSC0z5lpdACMG3Q4PUoIz5+H2jM')
- end
-
- context 'with invalid keys' do
- before do
- key = Key.find(1017)
- # a double space after "ssh-rsa" leads to an
- # OpenSSL::PKey::PKeyError in Net::SSH::KeyFactory.load_data_public_key
- key.update_column(:key, key.key.gsub('ssh-rsa ', 'ssh-rsa  '))
- end
-
- it 'ignores errors and does not set the fingerprint' do
- fingerprint_migrator.perform(1, 10000)
-
- key_1 = Key.find(1017)
- key_2 = Key.find(1027)
-
- expect(key_1.fingerprint_sha256).to be_nil
- expect(key_2.fingerprint_sha256).not_to be_nil
- end
- end
-
- it 'migrates all keys' do
- expect(Key.where(fingerprint_sha256: nil).count).to eq(Key.all.count)
-
- fingerprint_migrator.perform(1, 10000)
-
- expect(Key.where(fingerprint_sha256: nil).count).to eq(0)
- end
-
- def generate_fingerprints!
- values = ""
- (1000..2000).to_a.each do |record|
- key = base_key_for(record)
- fingerprint = fingerprint_for(key)
-
- values += "(#{record}, #{record}, 'test-#{record}', '#{key}', '#{fingerprint}'),"
- end
-
- update_query = <<~SQL
- INSERT INTO keys ( id, user_id, title, key, fingerprint )
- VALUES
- #{values.chomp(",")};
- SQL
-
- ActiveRecord::Base.connection.execute(update_query)
- end
-
- def base_key_for(record)
- 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt0000k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0='
- .gsub("0000", "%04d" % (record - 1)) # generate arbitrary keys with placeholder 0000 within the key above
- end
-
- def fingerprint_for(key)
- Gitlab::SSHPublicKey.new(key).fingerprint("md5")
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
deleted file mode 100644
index b6d93b9ff54..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 20181228175414 do
- let(:projects) { table(:projects) }
-
- subject(:migrate_pages_metadata) { described_class.new }
-
- describe '#perform' do
- let(:namespaces) { table(:namespaces) }
- let(:builds) { table(:ci_builds) }
- let(:pages_metadata) { table(:project_pages_metadata) }
-
- it 'marks specified projects with successful pages deployment' do
- namespace = namespaces.create!(name: 'gitlab', path: 'gitlab-org')
- not_migrated_with_pages = projects.create!(namespace_id: namespace.id, name: 'Not Migrated With Pages')
- builds.create!(project_id: not_migrated_with_pages.id, type: 'GenericCommitStatus', status: 'success', stage: 'deploy', name: 'pages:deploy')
-
- migrated = projects.create!(namespace_id: namespace.id, name: 'Migrated')
- pages_metadata.create!(project_id: migrated.id, deployed: true)
-
- not_migrated_no_pages = projects.create!(namespace_id: namespace.id, name: 'Not Migrated No Pages')
- project_not_in_relation_scope = projects.create!(namespace_id: namespace.id, name: 'Other')
-
- ids = [not_migrated_no_pages.id, not_migrated_with_pages.id, migrated.id]
-
- migrate_pages_metadata.perform(ids.min, ids.max)
-
- expect(pages_metadata.find_by_project_id(not_migrated_with_pages.id).deployed).to eq(true)
- expect(pages_metadata.find_by_project_id(not_migrated_no_pages.id).deployed).to eq(false)
- expect(pages_metadata.find_by_project_id(migrated.id).deployed).to eq(true)
- expect(pages_metadata.find_by_project_id(project_not_in_relation_scope.id)).to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb b/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb
deleted file mode 100644
index 0f7bb06e830..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-RSpec.describe Gitlab::BackgroundMigration::MigrateToHashedStorage, :sidekiq, :redis do
- let(:migrator) { Gitlab::HashedStorage::Migrator.new }
-
- subject(:background_migration) { described_class.new }
-
- describe '#perform' do
- let!(:project) { create(:project, :empty_repo, :legacy_storage) }
-
- context 'with pending rollback' do
- it 'aborts rollback operation' do
- Sidekiq::Testing.disable! do
- Sidekiq::Client.push(
- 'queue' => ::HashedStorage::ProjectRollbackWorker.queue,
- 'class' => ::HashedStorage::ProjectRollbackWorker,
- 'args' => [project.id]
- )
-
- expect { background_migration.perform }.to change { migrator.rollback_pending? }.from(true).to(false)
- end
- end
- end
-
- it 'enqueues legacy projects to be migrated' do
- Sidekiq::Testing.fake! do
- expect { background_migration.perform }.to change { Sidekiq::Queues[::HashedStorage::MigratorWorker.queue].size }.by(1)
- end
- end
-
- context 'when executing all jobs' do
- it 'migrates legacy projects' do
- Sidekiq::Testing.inline! do
- expect { background_migration.perform }.to change { project.reload.legacy_storage? }.from(true).to(false)
- end
- end
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb b/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb
deleted file mode 100644
index 944ee98ed4a..00000000000
--- a/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb
+++ /dev/null
@@ -1,94 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateCanonicalEmails, :migration, schema: 20181228175414 do
- let(:migration) { described_class.new }
-
- let_it_be(:users_table) { table(:users) }
- let_it_be(:user_canonical_emails_table) { table(:user_canonical_emails) }
-
- let_it_be(:users) { users_table.all }
- let_it_be(:user_canonical_emails) { user_canonical_emails_table.all }
-
- subject { migration.perform(1, 1) }
-
- describe 'gmail users' do
- using RSpec::Parameterized::TableSyntax
-
- where(:original_email, :expected_result) do
- 'legitimateuser@gmail.com' | 'legitimateuser@gmail.com'
- 'userwithplus+somestuff@gmail.com' | 'userwithplus@gmail.com'
- 'user.with.periods@gmail.com' | 'userwithperiods@gmail.com'
- 'user.with.periods.and.plus+someotherstuff@gmail.com' | 'userwithperiodsandplus@gmail.com'
- end
-
- with_them do
- it 'generates the correct canonical email' do
- create_user(email: original_email, id: 1)
-
- subject
-
- result = canonical_emails
- expect(result.count).to eq 1
- expect(result.first).to match({
- 'user_id' => 1,
- 'canonical_email' => expected_result
- })
- end
- end
- end
-
- describe 'non-gmail.com domain users' do
- %w[
- legitimateuser@somedomain.com
- userwithplus+somestuff@other.com
- user.with.periods@gmail.org
- user.with.periods.and.plus+someotherstuff@orangmail.com
- ].each do |non_gmail_address|
- it 'does not generate a canonical email' do
- create_user(email: non_gmail_address, id: 1)
-
- subject
-
- expect(canonical_emails(user_id: 1).count).to eq 0
- end
- end
- end
-
- describe 'gracefully handles missing records' do
- specify { expect { subject }.not_to raise_error }
- end
-
- describe 'gracefully handles existing records, some of which may have an already-existing identical canonical_email field' do
- let_it_be(:user_one) { create_user(email: "example.user@gmail.com", id: 1) }
- let_it_be(:user_two) { create_user(email: "exampleuser@gmail.com", id: 2) }
- let_it_be(:user_email_one) { user_canonical_emails.create!(canonical_email: "exampleuser@gmail.com", user_id: user_one.id) }
-
- subject { migration.perform(1, 2) }
-
- it 'only creates one record' do
- subject
-
- expect(canonical_emails.count).not_to be_nil
- end
- end
-
- def create_user(attributes)
- default_attributes = {
- projects_limit: 0
- }
-
- users.create!(default_attributes.merge!(attributes))
- end
-
- def canonical_emails(user_id: nil)
- filter_by_id = user_id ? "WHERE user_id = #{user_id}" : ""
-
- ApplicationRecord.connection.execute <<~SQL
- SELECT canonical_email, user_id
- FROM user_canonical_emails
- #{filter_by_id};
- SQL
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb
deleted file mode 100644
index dc8c8c75b83..00000000000
--- a/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Gitlab::BackgroundMigration::PopulateDismissedStateForVulnerabilities, schema: 20181228175414 do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:vulnerabilities) { table(:vulnerabilities) }
-
- let!(:namespace) { namespaces.create!(name: "foo", path: "bar") }
- let!(:user) { users.create!(name: 'John Doe', email: 'test@example.com', projects_limit: 5) }
- let!(:project) { projects.create!(namespace_id: namespace.id) }
- let!(:vulnerability_params) do
- {
- project_id: project.id,
- author_id: user.id,
- title: 'Vulnerability',
- severity: 5,
- confidence: 5,
- report_type: 5
- }
- end
-
- let!(:vulnerability_1) { vulnerabilities.create!(vulnerability_params.merge(state: 1)) }
- let!(:vulnerability_2) { vulnerabilities.create!(vulnerability_params.merge(state: 3)) }
-
- describe '#perform' do
- it 'changes state of vulnerability to dismissed' do
- subject.perform(vulnerability_1.id, vulnerability_2.id)
-
- expect(vulnerability_1.reload.state).to eq(2)
- expect(vulnerability_2.reload.state).to eq(2)
- end
-
- it 'populates missing dismissal information' do
- expect_next_instance_of(::Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation) do |migration|
- expect(migration).to receive(:perform).with(vulnerability_1.id, vulnerability_2.id)
- end
-
- subject.perform(vulnerability_1.id, vulnerability_2.id)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb
deleted file mode 100644
index 6722321d5f7..00000000000
--- a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateHasVulnerabilities, schema: 20181228175414 do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:project_settings) { table(:project_settings) }
- let(:vulnerabilities) { table(:vulnerabilities) }
-
- let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:vulnerability_base_params) { { title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, author_id: user.id } }
-
- let!(:project_1) { projects.create!(namespace_id: namespace.id, name: 'foo_1') }
- let!(:project_2) { projects.create!(namespace_id: namespace.id, name: 'foo_2') }
- let!(:project_3) { projects.create!(namespace_id: namespace.id, name: 'foo_3') }
-
- before do
- project_settings.create!(project_id: project_1.id)
- vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_1.id))
- vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_3.id))
-
- allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, error: true)
- end
-
- describe '#perform' do
- it 'sets `has_vulnerabilities` attribute of project_settings' do
- expect { subject.perform(project_1.id, project_3.id) }.to change { project_settings.count }.from(1).to(2)
- .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
- end
-
- it 'writes info log message' do
- subject.perform(project_1.id, project_3.id)
-
- expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
- message: 'Projects has been processed to populate `has_vulnerabilities` information',
- count: 2)
- end
-
- context 'when non-existing project_id is given' do
- it 'populates only for the existing projects' do
- expect { subject.perform(project_1.id, 0, project_3.id) }.to change { project_settings.count }.from(1).to(2)
- .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
- end
- end
-
- context 'when an error happens' do
- before do
- allow(described_class::ProjectSetting).to receive(:upsert_for).and_raise('foo')
- end
-
- it 'writes error log message' do
- subject.perform(project_1.id, project_3.id)
-
- expect(::Gitlab::BackgroundMigration::Logger).to have_received(:error).with(migrator: described_class.name,
- message: 'foo',
- project_ids: [project_1.id, project_3.id])
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
deleted file mode 100644
index 1d8eed53553..00000000000
--- a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, schema: 20181228175414 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:users) { table(:users) }
-
- let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
- let(:user_2) { users.create!(email: 'test2@example.com', projects_limit: 100, username: 'test') }
- let(:user_3) { users.create!(email: 'test3@example.com', projects_limit: 100, username: 'test') }
-
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
- let(:merge_requests) { table(:merge_requests) }
- let(:merge_request_assignees) { table(:merge_request_assignees) }
-
- def create_merge_request(id, params = {})
- params.merge!(id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}")
-
- merge_requests.create!(params)
- end
-
- before do
- create_merge_request(2, assignee_id: user.id)
- create_merge_request(3, assignee_id: user_2.id)
- create_merge_request(4, assignee_id: user_3.id)
-
- # Test filtering MRs without assignees
- create_merge_request(5, assignee_id: nil)
- # Test filtering already migrated row
- merge_request_assignees.create!(merge_request_id: 2, user_id: user_3.id)
- end
-
- describe '#perform' do
- it 'creates merge_request_assignees rows according to merge_requests' do
- subject.perform(1, 4)
-
- rows = merge_request_assignees.order(:id).map { |row| row.attributes.slice('merge_request_id', 'user_id') }
- existing_rows = [
- { 'merge_request_id' => 2, 'user_id' => user_3.id }
- ]
- created_rows = [
- { 'merge_request_id' => 3, 'user_id' => user_2.id },
- { 'merge_request_id' => 4, 'user_id' => user_3.id }
- ]
- expected_rows = existing_rows + created_rows
-
- expect(rows.size).to eq(expected_rows.size)
- expected_rows.each do |expected_row|
- expect(rows).to include(expected_row)
- end
- end
- end
-
- describe '#perform_all_sync' do
- it 'executes perform for all merge requests in batches' do
- expect(subject).to receive(:perform).with(2, 4).ordered
- expect(subject).to receive(:perform).with(5, 5).ordered
-
- subject.perform_all_sync(batch_size: 3)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb
deleted file mode 100644
index 1c987d3876f..00000000000
--- a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20181228175414 do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:vulnerabilities) { table(:vulnerabilities) }
- let(:findings) { table(:vulnerability_occurrences) }
- let(:scanners) { table(:vulnerability_scanners) }
- let(:identifiers) { table(:vulnerability_identifiers) }
- let(:feedback) { table(:vulnerability_feedback) }
-
- let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
- let(:vulnerability_1) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
- let(:vulnerability_2) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
- let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
- let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'identifier') }
-
- before do
- feedback.create!(feedback_type: 0,
- category: 'sast',
- project_fingerprint: '418291a26024a1445b23fe64de9380cdcdfd1fa8',
- project_id: project.id,
- author_id: user.id,
- created_at: Time.current)
-
- findings.create!(name: 'Finding',
- report_type: 'sast',
- project_fingerprint: Gitlab::Database::ShaAttribute.new.serialize('418291a26024a1445b23fe64de9380cdcdfd1fa8'),
- location_fingerprint: 'bar',
- severity: 1,
- confidence: 1,
- metadata_version: 1,
- raw_metadata: '',
- uuid: SecureRandom.uuid,
- project_id: project.id,
- vulnerability_id: vulnerability_1.id,
- scanner_id: scanner.id,
- primary_identifier_id: identifier.id)
-
- allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, warn: true, error: true)
- end
-
- describe '#perform' do
- it 'updates the missing dismissal information of the vulnerability' do
- expect { subject.perform(vulnerability_1.id, vulnerability_2.id) }.to change { vulnerability_1.reload.dismissed_at }.from(nil)
- .and change { vulnerability_1.reload.dismissed_by_id }.from(nil).to(user.id)
- end
-
- it 'writes log messages' do
- subject.perform(vulnerability_1.id, vulnerability_2.id)
-
- expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
- message: 'Dismissal information has been copied',
- count: 2)
- expect(::Gitlab::BackgroundMigration::Logger).to have_received(:warn).with(migrator: described_class.name,
- message: 'Could not update vulnerability!',
- vulnerability_id: vulnerability_2.id)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb
deleted file mode 100644
index f9628849dbf..00000000000
--- a/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb
+++ /dev/null
@@ -1,141 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulatePersonalSnippetStatistics do
- let(:file_name) { 'file_name.rb' }
- let(:content) { 'content' }
- let(:snippets) { table(:snippets) }
- let(:snippet_repositories) { table(:snippet_repositories) }
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:snippet_statistics) { table(:snippet_statistics) }
- let(:namespace_statistics) { table(:namespace_root_storage_statistics) }
- let(:routes) { table(:routes) }
- let(:repo_size) { 123456 }
- let(:expected_repo_size) { repo_size.megabytes }
-
- let(:user1) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test1') }
- let(:user2) { users.create!(id: 2, email: 'test2@example.com', projects_limit: 100, username: 'test2') }
- let!(:user1_namespace) { namespaces.create!(id: 1, name: 'user1', path: 'user1', owner_id: user1.id) }
- let!(:user2_namespace) { namespaces.create!(id: 2, name: 'user2', path: 'user2', owner_id: user2.id) }
- let(:user1_namespace_statistics) { namespace_statistics.find_by(namespace_id: user1_namespace.id) }
- let(:user2_namespace_statistics) { namespace_statistics.find_by(namespace_id: user2_namespace.id) }
-
- let(:ids) { snippets.pluck(:id) }
- let(:migration) { described_class.new }
-
- subject do
- migration.perform(ids)
- end
-
- before do
- allow_any_instance_of(Repository).to receive(:size).and_return(repo_size)
- end
-
- after do
- snippets.all.each { |s| raw_repository(s).remove }
- end
-
- context 'with existing personal snippets' do
- let!(:snippet1) { create_snippet(1, user1) }
- let!(:snippet2) { create_snippet(2, user1) }
- let!(:snippet3) { create_snippet(3, user2) }
- let!(:snippet4) { create_snippet(4, user2) }
-
- before do
- create_snippet_statistics(2, 0)
- create_snippet_statistics(4, 123)
- end
-
- it 'creates/updates all snippet_statistics' do
- expect { subject }.to change { snippet_statistics.count }.from(2).to(4)
-
- expect(snippet_statistics.pluck(:repository_size)).to be_all(expected_repo_size)
- end
-
- it 'creates/updates the associated namespace statistics' do
- expect(migration).to receive(:update_namespace_statistics).twice.and_call_original
-
- subject
-
- stats = snippet_statistics.where(snippet_id: [snippet1, snippet2]).sum(:repository_size)
- expect(user1_namespace_statistics.snippets_size).to eq stats
-
- stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
- expect(user2_namespace_statistics.snippets_size).to eq stats
- end
-
- context 'when an error is raised when updating a namespace statistics' do
- it 'logs the error and continues execution' do
- expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
- expect(instance).to receive(:execute).with(Namespace.find(user1_namespace.id)).and_raise('Error')
- end
-
- expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
- expect(instance).to receive(:execute).and_call_original
- end
-
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once
- end
-
- subject
-
- expect(user1_namespace_statistics).to be_nil
-
- stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
- expect(user2_namespace_statistics.snippets_size).to eq stats
- end
- end
- end
-
- context 'when a snippet repository is empty' do
- let!(:snippet1) { create_snippet(1, user1, with_repo: false) }
- let!(:snippet2) { create_snippet(2, user1) }
-
- it 'logs error and continues execution' do
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once
- end
-
- subject
-
- expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil
- expect(user1_namespace_statistics.snippets_size).to eq expected_repo_size
- end
- end
-
- def create_snippet(id, author, with_repo: true)
- snippets.create!(id: id, type: 'PersonalSnippet', author_id: author.id, file_name: file_name, content: content).tap do |snippet|
- if with_repo
- allow(snippet).to receive(:disk_path).and_return(disk_path(snippet))
-
- raw_repository(snippet).create_repository
-
- TestEnv.copy_repo(snippet,
- bare_repo: TestEnv.factory_repo_path_bare,
- refs: TestEnv::BRANCH_SHA)
- end
- end
- end
-
- def create_snippet_statistics(snippet_id, repository_size = 0)
- snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size)
- end
-
- def raw_repository(snippet)
- Gitlab::Git::Repository.new('default',
- "#{disk_path(snippet)}.git",
- Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
- "@snippets/#{snippet.id}")
- end
-
- def hashed_repository(snippet)
- Storage::Hashed.new(snippet, prefix: '@snippets')
- end
-
- def disk_path(snippet)
- hashed_repository(snippet).disk_path
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb
deleted file mode 100644
index 7884e0d97c0..00000000000
--- a/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb
+++ /dev/null
@@ -1,224 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateProjectSnippetStatistics do
- let(:file_name) { 'file_name.rb' }
- let(:content) { 'content' }
- let(:snippets) { table(:snippets) }
- let(:snippet_repositories) { table(:snippet_repositories) }
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:snippet_statistics) { table(:snippet_statistics) }
- let(:project_statistics) { table(:project_statistics) }
- let(:projects) { table(:projects) }
- let(:namespace_statistics) { table(:namespace_root_storage_statistics) }
- let(:routes) { table(:routes) }
- let(:repo_size) { 123456 }
- let(:expected_repo_size) { repo_size.megabytes }
-
- let(:user) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test') }
- let(:group) { namespaces.create!(id: 10, type: 'Group', name: 'group1', path: 'group1') }
- let(:user_namespace) { namespaces.create!(id: 20, name: 'user', path: 'user', owner_id: user.id) }
-
- let(:project1) { create_project(1, 'test', group) }
- let(:project2) { create_project(2, 'test1', user_namespace) }
- let(:project3) { create_project(3, 'test2', group) }
-
- let!(:project_stats1) { create_project_statistics(project1) }
- let!(:project_stats2) { create_project_statistics(project2) }
- let!(:project_stats3) { create_project_statistics(project3) }
-
- let(:ids) { snippets.pluck(:id) }
- let(:migration) { described_class.new }
-
- subject do
- migration.perform(ids)
-
- project_stats1.reload if project_stats1.persisted?
- project_stats2.reload if project_stats2.persisted?
- project_stats3.reload if project_stats3.persisted?
- end
-
- before do
- allow_any_instance_of(Repository).to receive(:size).and_return(repo_size)
- end
-
- after do
- snippets.all.each { |s| raw_repository(s).remove }
- end
-
- context 'with existing user and group snippets' do
- let!(:snippet1) { create_snippet(1, project1) }
- let!(:snippet2) { create_snippet(2, project1) }
- let!(:snippet3) { create_snippet(3, project2) }
- let!(:snippet4) { create_snippet(4, project2) }
- let!(:snippet5) { create_snippet(5, project3) }
-
- before do
- create_snippet_statistics(2, 0)
- create_snippet_statistics(4, 123)
- end
-
- it 'creates/updates all snippet_statistics' do
- expect(snippet_statistics.count).to eq 2
-
- subject
-
- expect(snippet_statistics.count).to eq 5
-
- snippet_statistics.all.each do |stat|
- expect(stat.repository_size).to eq expected_repo_size
- end
- end
-
- it 'updates associated snippet project statistics' do
- expect(project_stats1.snippets_size).to be_nil
- expect(project_stats2.snippets_size).to be_nil
-
- subject
-
- snippets_size = snippet_statistics.where(snippet_id: [snippet1.id, snippet2.id]).sum(:repository_size)
- expect(project_stats1.snippets_size).to eq snippets_size
-
- snippets_size = snippet_statistics.where(snippet_id: [snippet3.id, snippet4.id]).sum(:repository_size)
- expect(project_stats2.snippets_size).to eq snippets_size
-
- snippets_size = snippet_statistics.where(snippet_id: snippet5.id).sum(:repository_size)
- expect(project_stats3.snippets_size).to eq snippets_size
- end
-
- it 'forces the project statistics refresh' do
- expect(migration).to receive(:update_project_statistics).exactly(3).times
-
- subject
- end
-
- it 'creates/updates the associated namespace statistics' do
- expect(migration).to receive(:update_namespace_statistics).twice.and_call_original
-
- subject
-
- expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size + project_stats3.snippets_size
- expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size
- end
-
- context 'when the project statistics does not exist' do
- it 'does not raise any error' do
- project_stats3.delete
-
- subject
-
- expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size
- expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size
- end
- end
-
- context 'when an error is raised when updating a project statistics' do
- it 'logs the error and continues execution' do
- expect(migration).to receive(:update_project_statistics).with(Project.find(project1.id)).and_raise('Error')
- expect(migration).to receive(:update_project_statistics).with(Project.find(project2.id)).and_call_original
- expect(migration).to receive(:update_project_statistics).with(Project.find(project3.id)).and_call_original
-
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:error).with(message: /Error updating statistics for project #{project1.id}/).once
- end
-
- subject
-
- expect(project_stats2.snippets_size).not_to be_nil
- expect(project_stats3.snippets_size).not_to be_nil
- end
- end
-
- context 'when an error is raised when updating a namespace statistics' do
- it 'logs the error and continues execution' do
- expect(migration).to receive(:update_namespace_statistics).with(Group.find(group.id)).and_raise('Error')
- expect(migration).to receive(:update_namespace_statistics).with(Namespace.find(user_namespace.id)).and_call_original
-
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once
- end
-
- subject
-
- expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size
- end
- end
- end
-
- context 'when project snippet is in a subgroup' do
- let(:subgroup) { namespaces.create!(id: 30, type: 'Group', name: 'subgroup', path: 'subgroup', parent_id: group.id) }
- let(:project1) { create_project(1, 'test', subgroup, "#{group.path}/#{subgroup.path}/test") }
- let!(:snippet1) { create_snippet(1, project1) }
-
- it 'updates the root namespace statistics' do
- subject
-
- expect(snippet_statistics.count).to eq 1
- expect(project_stats1.snippets_size).to eq snippet_statistics.first.repository_size
- expect(namespace_statistics.find_by(namespace_id: subgroup.id)).to be_nil
- expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size
- end
- end
-
- context 'when a snippet repository is empty' do
- let!(:snippet1) { create_snippet(1, project1, with_repo: false) }
- let!(:snippet2) { create_snippet(2, project1) }
-
- it 'logs error and continues execution' do
- expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once
- end
-
- subject
-
- expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil
- expect(project_stats1.snippets_size).to eq snippet_statistics.find(snippet2.id).repository_size
- end
- end
-
- def create_snippet(id, project, with_repo: true)
- snippets.create!(id: id, type: 'ProjectSnippet', project_id: project.id, author_id: user.id, file_name: file_name, content: content).tap do |snippet|
- if with_repo
- allow(snippet).to receive(:disk_path).and_return(disk_path(snippet))
-
- raw_repository(snippet).create_repository
-
- TestEnv.copy_repo(snippet,
- bare_repo: TestEnv.factory_repo_path_bare,
- refs: TestEnv::BRANCH_SHA)
- end
- end
- end
-
- def create_project(id, name, namespace, path = nil)
- projects.create!(id: id, name: name, path: name.downcase.gsub(/\s/, '_'), namespace_id: namespace.id).tap do |project|
- path ||= "#{namespace.path}/#{project.path}"
- routes.create!(id: id, source_type: 'Project', source_id: project.id, path: path)
- end
- end
-
- def create_snippet_statistics(snippet_id, repository_size = 0)
- snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size)
- end
-
- def create_project_statistics(project, snippets_size = nil)
- project_statistics.create!(id: project.id, project_id: project.id, namespace_id: project.namespace_id, snippets_size: snippets_size)
- end
-
- def raw_repository(snippet)
- Gitlab::Git::Repository.new('default',
- "#{disk_path(snippet)}.git",
- Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
- "@snippets/#{snippet.id}")
- end
-
- def hashed_repository(snippet)
- Storage::Hashed.new(snippet, prefix: '@snippets')
- end
-
- def disk_path(snippet)
- hashed_repository(snippet).disk_path
- end
-end
diff --git a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb
deleted file mode 100644
index 1830a7fc099..00000000000
--- a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizationsWithMinMaxUserId, schema: 20181228175414 do
- let(:users_table) { table(:users) }
- let(:min) { 1 }
- let(:max) { 5 }
-
- before do
- min.upto(max) do |i|
- users_table.create!(id: i, email: "user#{i}@example.com", projects_limit: 10)
- end
- end
-
- describe '#perform' do
- it 'initializes Users::RefreshAuthorizedProjectsService with correct users' do
- min.upto(max) do |i|
- user = User.find(i)
- expect(Users::RefreshAuthorizedProjectsService).to(
- receive(:new).with(user, any_args).and_call_original)
- end
-
- described_class.new.perform(min, max)
- end
-
- it 'executes Users::RefreshAuthorizedProjectsService' do
- expected_call_counts = max - min + 1
-
- service = instance_double(Users::RefreshAuthorizedProjectsService)
- expect(Users::RefreshAuthorizedProjectsService).to(
- receive(:new).exactly(expected_call_counts).times.and_return(service))
- expect(service).to receive(:execute).exactly(expected_call_counts).times
-
- described_class.new.perform(min, max)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
new file mode 100644
index 00000000000..28aa9efde4f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
@@ -0,0 +1,175 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings do
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:users) { table(:users) }
+ let(:user) { create_user! }
+ let(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+ let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') }
+ let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_identifier) do
+ vulnerability_identifiers.create!(
+ id: 1244459,
+ project_id: project.id,
+ external_type: 'vulnerability-identifier',
+ external_id: 'vulnerability-identifier',
+ fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45',
+ name: 'vulnerability identifier')
+ end
+
+ let!(:vulnerability_for_first_duplicate) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:first_finding_duplicate) do
+ create_finding!(
+ id: 5606961,
+ uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e",
+ vulnerability_id: vulnerability_for_first_duplicate.id,
+ report_type: 0,
+ location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner1.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:vulnerability_for_second_duplicate) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:second_finding_duplicate) do
+ create_finding!(
+ id: 8765432,
+ uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5",
+ vulnerability_id: vulnerability_for_second_duplicate.id,
+ report_type: 0,
+ location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner2.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:vulnerability_for_third_duplicate) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:third_finding_duplicate) do
+ create_finding!(
+ id: 8832995,
+ uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4",
+ vulnerability_id: vulnerability_for_third_duplicate.id,
+ report_type: 0,
+ location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner3.id,
+ project_id: project.id
+ )
+ end
+
+ let!(:unrelated_finding) do
+ create_finding!(
+ id: 9999999,
+ uuid: "unreleated_finding",
+ vulnerability_id: nil,
+ report_type: 1,
+ location_fingerprint: 'random_location_fingerprint',
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: unrelated_scanner.id,
+ project_id: project.id
+ )
+ end
+
+ subject { described_class.new.perform(first_finding_duplicate.id, unrelated_finding.id) }
+
+ before do
+ 4.times do
+ create_finding_pipeline!(project_id: project.id, finding_id: first_finding_duplicate.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: second_finding_duplicate.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: third_finding_duplicate.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: unrelated_finding.id)
+ end
+ end
+
+ it 'removes Vulnerabilities::OccurrencePipelines for matching Vulnerabilities::Finding' do
+ expect(vulnerability_findings.count).to eq(4)
+ expect(vulnerability_finding_pipelines.count).to eq(16)
+
+ expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8)
+ .and change(vulnerability_findings, :count).from(4).to(2)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ id: nil,
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ params = {
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: vulnerability_identifier.id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ }
+ params[:id] = id unless id.nil?
+ vulnerability_findings.create!(params)
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now)
+ table(:users).create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: confirmed_at
+ )
+ end
+
+ def create_finding_pipeline!(project_id:, finding_id:)
+ pipeline = table(:ci_pipelines).create!(project_id: project_id)
+ vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
new file mode 100644
index 00000000000..fadee64886f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::RemoveVulnerabilityFindingLinks, :migration, schema: 20211104165220 do
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:finding_links) { table(:vulnerability_finding_links) }
+
+ let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'scanner', name: 'scanner') }
+ let(:vulnerability_identifier) do
+ table(:vulnerability_identifiers).create!(
+ project_id: project.id,
+ external_type: 'vulnerability-identifier',
+ external_id: 'vulnerability-identifier',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'vulnerability identifier')
+ end
+
+ # vulnerability findings
+ let!(:findings) do
+ Array.new(2) do |id|
+ vulnerability_findings.create!(
+ project_id: project.id,
+ name: 'Vulnerability Name',
+ severity: 7,
+ confidence: 7,
+ report_type: 0,
+ project_fingerprint: '123qweasdzxc',
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identifier.id,
+ location_fingerprint: "location_fingerprint_#{id}",
+ metadata_version: 'metadata_version',
+ raw_metadata: 'raw_metadata',
+ uuid: "uuid_#{id}"
+ )
+ end
+ end
+
+ # vulnerability finding links
+ let!(:links) do
+ {
+ findings.first => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.first.id, name: "Link Name 1", url: "link_url1.example") },
+ findings.second => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.second.id, name: "Link Name 2", url: "link_url2.example") }
+ }
+ end
+
+ it 'removes vulnerability links' do
+ expect do
+ subject.perform(links[findings.first].first.id, links[findings.second].last.id)
+ end.to change { finding_links.count }.from(10).to(0)
+
+ expect(finding_links.all).to be_empty
+ end
+
+ it 'only deletes vulnerability links for the current batch' do
+ expected_links = [finding_links.where(vulnerability_occurrence_id: findings.second.id)].flatten
+
+ expect do
+ subject.perform(links[findings.first].first.id, links[findings.first].last.id)
+ end.to change { finding_links.count }.from(10).to(5)
+
+ expect(finding_links.all).to match_array(expected_links)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
deleted file mode 100644
index 6cfdbb5a14e..00000000000
--- a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20181228175414 do
- let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { table(:projects).create!(namespace_id: namespace.id, name: 'gitlab') }
- let(:issue1) { table(:issues).create!(project_id: project.id, title: 'a') }
- let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') }
- let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') }
- let(:issue_links) { table(:issue_links) }
- let!(:blocked_link1) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) }
- let!(:opposite_link1) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) }
- let!(:blocked_link2) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) }
- let!(:opposite_link2) { issue_links.create!(source_id: issue3.id, target_id: issue1.id, link_type: 0) }
- let!(:nochange_link) { issue_links.create!(source_id: issue2.id, target_id: issue3.id, link_type: 1) }
-
- subject { described_class.new.perform(issue_links.minimum(:id), issue_links.maximum(:id)) }
-
- it 'deletes any opposite relations' do
- subject
-
- expect(issue_links.ids).to match_array([nochange_link.id, blocked_link1.id, blocked_link2.id])
- end
-
- it 'ignores issue links other than blocked_by' do
- subject
-
- expect(nochange_link.reload.link_type).to eq(1)
- end
-
- it 'updates blocked_by issue links' do
- subject
-
- expect(blocked_link1.reload.link_type).to eq(1)
- expect(blocked_link1.source_id).to eq(issue1.id)
- expect(blocked_link1.target_id).to eq(issue2.id)
- expect(blocked_link2.reload.link_type).to eq(1)
- expect(blocked_link2.source_id).to eq(issue3.id)
- expect(blocked_link2.target_id).to eq(issue1.id)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb b/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
deleted file mode 100644
index 2f5074649c4..00000000000
--- a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetMergeStatus do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
- let(:merge_requests) { table(:merge_requests) }
-
- def create_merge_request(id, extra_params = {})
- params = {
- id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}"
- }.merge(extra_params)
-
- merge_requests.create!(params)
- end
-
- it 'correctly updates opened mergeable MRs to unchecked' do
- create_merge_request(1, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
- create_merge_request(2, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
- create_merge_request(3, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
- create_merge_request(4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged')
- create_merge_request(5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged')
-
- subject.perform(1, 5)
-
- expected_rows = [
- { id: 1, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
- { id: 2, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
- { id: 3, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
- { id: 4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged' },
- { id: 5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged' }
- ]
-
- rows = merge_requests.order(:id).map do |row|
- row.attributes.slice('id', 'state_id', 'merge_status').symbolize_keys
- end
-
- expect(rows).to eq(expected_rows)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb
deleted file mode 100644
index ef90b5674f0..00000000000
--- a/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetSharedRunnersForTransferredProjects, schema: 20181228175414 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- let(:namespace_1) { namespaces.create!(name: 'foo', path: 'foo', shared_runners_enabled: true, allow_descendants_override_disabled_shared_runners: false ) }
- let(:namespace_2) { namespaces.create!(name: 'foo', path: 'foo', shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false ) }
- let(:namespace_3) { namespaces.create!(name: 'bar', path: 'bar', shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true ) }
- let(:project_1_1) { projects.create!(namespace_id: namespace_1.id, shared_runners_enabled: true) }
- let(:project_1_2) { projects.create!(namespace_id: namespace_1.id, shared_runners_enabled: false) }
- let(:project_2_1) { projects.create!(namespace_id: namespace_2.id, shared_runners_enabled: true) }
- let(:project_2_2) { projects.create!(namespace_id: namespace_2.id, shared_runners_enabled: false) }
- let(:project_3_1) { projects.create!(namespace_id: namespace_3.id, shared_runners_enabled: true) }
- let(:project_3_2) { projects.create!(namespace_id: namespace_3.id, shared_runners_enabled: false) }
-
- it 'corrects each project shared_runners_enabled column' do
- expect do
- described_class.new.perform(namespace_1.id, namespace_3.id)
- project_1_1.reload
- project_1_2.reload
- project_2_1.reload
- project_2_2.reload
- project_3_1.reload
- project_3_2.reload
- end.to not_change(project_1_1, :shared_runners_enabled).from(true)
- .and not_change(project_1_2, :shared_runners_enabled).from(false)
- .and change(project_2_1, :shared_runners_enabled).from(true).to(false)
- .and not_change(project_2_2, :shared_runners_enabled).from(false)
- .and not_change(project_3_1, :shared_runners_enabled).from(true)
- .and not_change(project_3_2, :shared_runners_enabled).from(false)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb
deleted file mode 100644
index 1fdbdf25706..00000000000
--- a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schema: 20181228175414 do
- let(:merge_request_diff_files) { table(:merge_request_diff_files) }
- let(:merge_request_diffs) { table(:merge_request_diffs) }
- let(:merge_requests) { table(:merge_requests) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
- let(:project) { projects.create!(namespace_id: namespace.id) }
- let(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) }
-
- let!(:empty_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) }
- let!(:filled_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) }
-
- let!(:filled_diff_files) do
- 1.upto(3).map do |n|
- merge_request_diff_files.create!(
- merge_request_diff_id: filled_diff.id,
- relative_order: n,
- new_file: false,
- renamed_file: false,
- deleted_file: false,
- too_large: false,
- a_mode: '',
- b_mode: '',
- old_path: '',
- new_path: ''
- )
- end
- end
-
- it 'fills the files_count column' do
- described_class.new.perform(empty_diff.id, filled_diff.id)
-
- expect(empty_diff.reload.files_count).to eq(0)
- expect(filled_diff.reload.files_count).to eq(3)
- end
-
- it 'uses the sentinel value if the actual count is too high' do
- stub_const("#{described_class}::FILES_COUNT_SENTINEL", filled_diff_files.size - 1)
-
- described_class.new.perform(empty_diff.id, filled_diff.id)
-
- expect(empty_diff.reload.files_count).to eq(0)
- expect(filled_diff.reload.files_count).to eq(described_class::FILES_COUNT_SENTINEL)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
deleted file mode 100644
index de9799c3642..00000000000
--- a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, schema: 20181228175414 do
- include MigrationHelpers::NamespacesHelpers
-
- context 'private visibility level' do
- it 'updates the project visibility' do
- parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
- child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
-
- expect { subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::PRIVATE)
- end
-
- it 'updates sub-sub groups' do
- parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
- middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PRIVATE, parent_id: parent.id)
- child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
-
- subject.perform([parent.id, middle_group.id], Gitlab::VisibilityLevel::PRIVATE)
-
- expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
-
- it 'updates all sub groups' do
- parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE)
- middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
- child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id)
-
- subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE)
-
- expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- expect(middle_group.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
- end
-
- context 'internal visibility level' do
- it 'updates the project visibility' do
- parent = create_namespace('parent', Gitlab::VisibilityLevel::INTERNAL)
- child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id)
-
- expect { subject.perform([parent.id], Gitlab::VisibilityLevel::INTERNAL) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb b/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb
deleted file mode 100644
index 33f5e38100e..00000000000
--- a/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::UpdateExistingUsersThatRequireTwoFactorAuth, schema: 20181228175414 do
- include MigrationHelpers::NamespacesHelpers
-
- let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE) }
- let(:group_with_2fa_child) { create_namespace('child', Gitlab::VisibilityLevel::PRIVATE, parent_id: group_with_2fa_parent.id) }
- let(:members_table) { table(:members) }
- let(:users_table) { table(:users) }
-
- subject { described_class.new }
-
- describe '#perform' do
- context 'with group members' do
- let(:user_1) { create_user('user@example.com') }
- let!(:member) { create_group_member(user_1, group_with_2fa_parent) }
- let!(:user_without_group) { create_user('user_without@example.com') }
- let(:user_other) { create_user('user_other@example.com') }
- let!(:member_other) { create_group_member(user_other, group_with_2fa_parent) }
-
- it 'updates user when user should not be required to establish two factor authentication' do
- subject.perform(user_1.id, user_without_group.id)
-
- expect(user_1.reload.require_two_factor_authentication_from_group).to eq(false)
- end
-
- it 'does not update user when user is member of group that requires two factor authentication' do
- group = create_namespace('other', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true)
- create_group_member(user_1, group)
-
- subject.perform(user_1.id, user_without_group.id)
-
- expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true)
- end
-
- it 'does not update user who is not in current batch' do
- subject.perform(user_1.id, user_without_group.id)
-
- expect(user_other.reload.require_two_factor_authentication_from_group).to eq(true)
- end
-
- it 'updates all users in current batch' do
- subject.perform(user_1.id, user_other.id)
-
- expect(user_other.reload.require_two_factor_authentication_from_group).to eq(false)
- end
-
- it 'does not update user when user is member of group which parent group requires two factor authentication' do
- group_with_2fa_parent.update!(require_two_factor_authentication: true)
- subject.perform(user_1.id, user_other.id)
-
- expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true)
- end
-
- it 'does not update user when user is member of group which has subgroup that requires two factor authentication' do
- create_namespace('subgroup', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true, parent_id: group_with_2fa_child.id)
-
- subject.perform(user_1.id, user_other.id)
-
- expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true)
- end
- end
- end
-
- def create_user(email, require_2fa: true)
- users_table.create!(email: email, projects_limit: 10, require_two_factor_authentication_from_group: require_2fa)
- end
-
- def create_group_member(user, group)
- members_table.create!(user_id: user.id, source_id: group.id, access_level: GroupMember::MAINTAINER, source_type: "Namespace", type: "GroupMember", notification_level: 3)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb
new file mode 100644
index 00000000000..982e3319063
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsNullSpentAt, schema: 20211215090620 do
+ let_it_be(:previous_time) { 10.days.ago }
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+ let_it_be(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let_it_be(:issue) { table(:issues).create!(project_id: project.id) }
+ let_it_be(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') }
+ let_it_be(:timelog1) { create_timelog!(issue_id: issue.id) }
+ let_it_be(:timelog2) { create_timelog!(merge_request_id: merge_request.id) }
+ let_it_be(:timelog3) { create_timelog!(issue_id: issue.id, spent_at: previous_time) }
+ let_it_be(:timelog4) { create_timelog!(merge_request_id: merge_request.id, spent_at: previous_time) }
+
+ subject(:background_migration) { described_class.new }
+
+ before_all do
+ table(:timelogs).where.not(id: [timelog3.id, timelog4.id]).update_all(spent_at: nil)
+ end
+
+ describe '#perform' do
+ it 'sets correct spent_at' do
+ background_migration.perform(timelog1.id, timelog4.id)
+
+ expect(timelog1.reload.spent_at).to be_like_time(timelog1.created_at)
+ expect(timelog2.reload.spent_at).to be_like_time(timelog2.created_at)
+ expect(timelog3.reload.spent_at).to be_like_time(previous_time)
+ expect(timelog4.reload.spent_at).to be_like_time(previous_time)
+ expect(timelog3.reload.spent_at).not_to be_like_time(timelog3.created_at)
+ expect(timelog4.reload.spent_at).not_to be_like_time(timelog4.created_at)
+ end
+ end
+
+ private
+
+ def create_timelog!(**args)
+ table(:timelogs).create!(**args, time_spent: 1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
deleted file mode 100644
index 7af11ffa1e0..00000000000
--- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20181228175414 do
- context 'checks no_quote_columns' do
- it 'has correct no_quote_columns' do
- expect(Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest.no_quote_columns).to match([:note_id, :merge_request_id])
- end
-
- it 'commit has correct no_quote_columns' do
- expect(Gitlab::BackgroundMigration::UserMentions::Models::Commit.no_quote_columns).to match([:note_id])
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb
index 777dc8112a7..8dd7f6892a6 100644
--- a/spec/lib/gitlab/background_migration_spec.rb
+++ b/spec/lib/gitlab/background_migration_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration do
- let(:coordinator) { described_class::JobCoordinator.for_database(:main) }
+ let(:default_tracking_database) { described_class::DEFAULT_TRACKING_DATABASE }
+ let(:coordinator) { described_class::JobCoordinator.for_tracking_database(default_tracking_database) }
before do
allow(described_class).to receive(:coordinator_for_database)
- .with(:main)
+ .with(default_tracking_database)
.and_return(coordinator)
end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
index f9313f0ff28..0380ddd9a2e 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
@@ -27,20 +27,26 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
end
describe '#import_repository' do
+ let(:repo_url) { 'http://bitbucket:test@my-bitbucket' }
+
+ before do
+ expect(project.repository).to receive(:import_repository).with(repo_url)
+ end
+
it 'adds a remote' do
expect(subject).to receive(:import_pull_requests)
expect(subject).to receive(:delete_temp_branches)
expect(project.repository).to receive(:fetch_as_mirror)
- .with('http://bitbucket:test@my-bitbucket',
- refmap: [:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head'])
+ .with(repo_url,
+ refmap: ['+refs/pull-requests/*/to:refs/merge-requests/*/head'])
subject.execute
end
- it 'raises a Gitlab::Shell exception in the fetch' do
- expect(project.repository).to receive(:fetch_as_mirror).and_raise(Gitlab::Shell::Error)
+ it 'raises a Gitlab::Git::CommandError in the fetch' do
+ expect(project.repository).to receive(:fetch_as_mirror).and_raise(::Gitlab::Git::CommandError)
- expect { subject.execute }.to raise_error(Gitlab::Shell::Error)
+ expect { subject.execute }.to raise_error(::Gitlab::Git::CommandError)
end
it 'raises an unhandled exception in the fetch' do
diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb
index 46447231424..7f862a3b80a 100644
--- a/spec/lib/gitlab/ci/build/context/build_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/build_spec.rb
@@ -8,11 +8,7 @@ RSpec.describe Gitlab::Ci::Build::Context::Build do
let(:context) { described_class.new(pipeline, seed_attributes) }
- describe '#variables' do
- subject { context.variables.to_hash }
-
- it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
-
+ shared_examples 'variables collection' do
it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) }
it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
@@ -27,4 +23,20 @@ RSpec.describe Gitlab::Ci::Build::Context::Build do
it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
end
end
+
+ describe '#variables' do
+ subject { context.variables.to_hash }
+
+ it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
+
+ it_behaves_like 'variables collection'
+ end
+
+ describe '#variables_hash' do
+ subject { context.variables_hash }
+
+ it { expect(context.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) }
+
+ it_behaves_like 'variables collection'
+ end
end
diff --git a/spec/lib/gitlab/ci/build/context/global_spec.rb b/spec/lib/gitlab/ci/build/context/global_spec.rb
index 61f2b90426d..d4141eb8389 100644
--- a/spec/lib/gitlab/ci/build/context/global_spec.rb
+++ b/spec/lib/gitlab/ci/build/context/global_spec.rb
@@ -8,11 +8,7 @@ RSpec.describe Gitlab::Ci::Build::Context::Global do
let(:context) { described_class.new(pipeline, yaml_variables: yaml_variables) }
- describe '#variables' do
- subject { context.variables.to_hash }
-
- it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
-
+ shared_examples 'variables collection' do
it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') }
it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) }
it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) }
@@ -26,4 +22,20 @@ RSpec.describe Gitlab::Ci::Build::Context::Global do
it { is_expected.to include('SUPPORTED' => 'parsed') }
end
end
+
+ describe '#variables' do
+ subject { context.variables.to_hash }
+
+ it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
+
+ it_behaves_like 'variables collection'
+ end
+
+ describe '#variables_hash' do
+ subject { context.variables_hash }
+
+ it { is_expected.to be_instance_of(ActiveSupport::HashWithIndifferentAccess) }
+
+ it_behaves_like 'variables collection'
+ end
end
diff --git a/spec/lib/gitlab/ci/build/policy/variables_spec.rb b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
index 6c8c968dc0c..436ad59bdf7 100644
--- a/spec/lib/gitlab/ci/build/policy/variables_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Variables do
let(:seed) do
double('build seed',
to_resource: ci_build,
- variables: ci_build.scoped_variables
+ variables_hash: ci_build.scoped_variables.to_hash
)
end
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Variables do
let(:seed) do
double('bridge seed',
to_resource: bridge,
- variables: ci_build.scoped_variables
+ variables_hash: ci_build.scoped_variables.to_hash
)
end
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
index d20ea6c9202..532c83f6768 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
@@ -33,12 +33,12 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
end
context 'when context has the specified variables' do
- let(:variables) do
- [{ key: "HELM_DIR", value: "helm", public: true }]
+ let(:variables_hash) do
+ { 'HELM_DIR' => 'helm' }
end
before do
- allow(context).to receive(:variables).and_return(variables)
+ allow(context).to receive(:variables_hash).and_return(variables_hash)
end
it { is_expected.to be_truthy }
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
let(:modified_paths) { ['path/with/$in/it/file.txt'] }
before do
- allow(context).to receive(:variables).and_return([])
+ allow(context).to receive(:variables_hash).and_return({})
end
it { is_expected.to be_truthy }
diff --git a/spec/lib/gitlab/ci/build/rules/rule_spec.rb b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
index 6f3c9278677..f905e229415 100644
--- a/spec/lib/gitlab/ci/build/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule do
let(:seed) do
double('build seed',
to_resource: ci_build,
- variables: ci_build.scoped_variables
+ variables_hash: ci_build.scoped_variables.to_hash
)
end
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index 1d5bdf30278..37bfdca4d1d 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Build::Rules do
- let(:pipeline) { create(:ci_pipeline) }
- let(:ci_build) { build(:ci_build, pipeline: pipeline) }
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:ci_build) { build(:ci_build, pipeline: pipeline) }
let(:seed) do
double('build seed',
to_resource: ci_build,
- variables: ci_build.scoped_variables
+ variables_hash: ci_build.scoped_variables.to_hash
)
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 6c9c8fa5df5..62feed3dda0 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -163,7 +163,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
})
end
- it { is_expected.not_to be_valid }
+ it { is_expected.to be_valid }
end
context 'when bridge configuration uses rules with only' do
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 0bb26babfc0..885f3eaff79 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -118,6 +118,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
+ context 'when config uses both "when:" and "rules:"' do
+ let(:config) do
+ {
+ script: 'echo',
+ when: 'on_failure',
+ rules: [{ if: '$VARIABLE', when: 'on_success' }]
+ }
+ end
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
context 'when delayed job' do
context 'when start_in is specified' do
let(:config) { { script: 'echo', when: 'delayed', start_in: '1 week' } }
@@ -268,21 +282,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'when it uses both "when:" and "rules:"' do
- let(:config) do
- {
- script: 'echo',
- when: 'on_failure',
- rules: [{ if: '$VARIABLE', when: 'on_success' }]
- }
- end
-
- it 'returns an error about when: being combined with rules' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'job config key may not be used with `rules`: when'
- end
- end
-
context 'when delayed job' do
context 'when start_in is specified' do
let(:config) { { script: 'echo', when: 'delayed', start_in: '1 week' } }
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index c9c28e2eb8b..5b9337ede34 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -33,6 +33,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
+ context 'when config uses both "when:" and "rules:"' do
+ let(:config) do
+ {
+ script: 'echo',
+ when: 'on_failure',
+ rules: [{ if: '$VARIABLE', when: 'on_success' }]
+ }
+ end
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
context 'when job name is more than 255' do
let(:entry) { node_class.new(config, name: ('a' * 256).to_sym) }
@@ -90,21 +104,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
- context 'when it uses both "when:" and "rules:"' do
- let(:config) do
- {
- script: 'echo',
- when: 'on_failure',
- rules: [{ if: '$VARIABLE', when: 'on_success' }]
- }
- end
-
- it 'returns an error about when: being combined with rules' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'job config key may not be used with `rules`: when'
- end
- end
-
context 'when only: is used with rules:' do
let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
diff --git a/spec/lib/gitlab/ci/config/entry/tags_spec.rb b/spec/lib/gitlab/ci/config/entry/tags_spec.rb
index 79317de373b..e05d4ae52b2 100644
--- a/spec/lib/gitlab/ci/config/entry/tags_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/tags_spec.rb
@@ -36,25 +36,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Tags do
context 'when tags limit is reached' do
let(:config) { Array.new(50) {|i| "tag-#{i}" } }
- context 'when ci_build_tags_limit is enabled' do
- before do
- stub_feature_flags(ci_build_tags_limit: true)
- end
-
- it 'reports error' do
- expect(entry.errors)
- .to include "tags config must be less than the limit of #{described_class::TAGS_LIMIT} tags"
- end
- end
-
- context 'when ci_build_tags_limit is disabled' do
- before do
- stub_feature_flags(ci_build_tags_limit: false)
- end
-
- it 'does not report an error' do
- expect(entry.errors).to be_empty
- end
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "tags config must be less than the limit of #{described_class::TAGS_LIMIT} tags"
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index 4b9adf7e87b..800c563cd0b 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
let(:project) { double('Project') }
let(:user) { double('User') }
let(:sha) { '12345' }
- let(:attributes) { { project: project, user: user, sha: sha } }
+ let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'a', 'value' => 'b' }]) }
+ let(:attributes) { { project: project, user: user, sha: sha, variables: variables } }
subject(:subject) { described_class.new(**attributes) }
@@ -15,6 +16,9 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
it { is_expected.to have_attributes(**attributes) }
it { expect(subject.expandset).to eq(Set.new) }
it { expect(subject.execution_deadline).to eq(0) }
+ it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
+ it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) }
+ it { expect(subject.variables_hash).to include('a' => 'b') }
end
context 'without values' do
@@ -23,6 +27,8 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
it { is_expected.to have_attributes(**attributes) }
it { expect(subject.expandset).to eq(Set.new) }
it { expect(subject.execution_deadline).to eq(0) }
+ it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) }
+ it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) }
end
end
@@ -94,6 +100,15 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
end
describe '#mutate' do
+ let(:attributes) do
+ {
+ project: project,
+ user: user,
+ sha: sha,
+ logger: double('logger')
+ }
+ end
+
shared_examples 'a mutated context' do
let(:mutated) { subject.mutate(new_attributes) }
@@ -107,6 +122,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
it { expect(mutated).to have_attributes(new_attributes) }
it { expect(mutated.expandset).to eq(subject.expandset) }
it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) }
+ it { expect(mutated.logger).to eq(mutated.logger) }
end
context 'with attributes' do
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 2e9e6f95071..97bd74721f2 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
include StubRequests
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:another_project) { create(:project, :repository) }
+ let_it_be_with_reload(:another_project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
@@ -251,6 +251,17 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
it 'properly expands all includes' do
is_expected.to include(:my_build, :remote_build, :rspec)
end
+
+ it 'propagates the pipeline logger' do
+ processor.perform
+
+ process_obs_count = processor
+ .logger
+ .observations_hash
+ .dig('config_mapper_process_duration_s', 'count')
+
+ expect(process_obs_count).to eq(3)
+ end
end
context 'when user is reporter of another project' do
diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb
index 1e42cb30ae7..091bd3b07e6 100644
--- a/spec/lib/gitlab/ci/config/external/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do
subject(:rules) { described_class.new(rule_hashes) }
describe '#evaluate' do
- let(:context) { double(variables: {}) }
+ let(:context) { double(variables_hash: {}) }
subject(:result) { rules.evaluate(context).pass? }
@@ -20,13 +20,13 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do
let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] }
context 'when the rule matches' do
- let(:context) { double(variables: { MY_VAR: 'hello' }) }
+ let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) }
it { is_expected.to eq(true) }
end
context 'when the rule does not match' do
- let(:context) { double(variables: { MY_VAR: 'invalid' }) }
+ let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) }
it { is_expected.to eq(false) }
end
diff --git a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
index f487fccdab7..60b4e01f382 100644
--- a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::Ci::Parsers::Terraform::Tfplan do
'create' => 0,
'update' => 1,
'delete' => 0,
- 'job_name' => artifact.job.options.dig(:artifacts, :name).to_s
+ 'job_name' => artifact.job.name
)
)
)
@@ -124,7 +124,7 @@ RSpec.describe Gitlab::Ci::Parsers::Terraform::Tfplan do
'create' => 0,
'update' => 1,
'delete' => 0,
- 'job_name' => artifact.job.options.dig(:artifacts, :name).to_s
+ 'job_name' => artifact.job.name
)
)
)
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
new file mode 100644
index 00000000000..28bc685286f
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) }
+ let(:pipeline) { create(:ci_pipeline, project: project, stages: [stage]) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ describe '#perform!' do
+ subject { step.perform! }
+
+ before do
+ job.pipeline = pipeline
+ end
+
+ context 'when a pipeline contains a deployment job' do
+ let!(:job) { build(:ci_build, :start_review_app, project: project) }
+ let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) }
+
+ it 'creates a deployment record' do
+ expect { subject }.to change { Deployment.count }.by(1)
+
+ job.reset
+ expect(job.deployment.project).to eq(job.project)
+ expect(job.deployment.ref).to eq(job.ref)
+ expect(job.deployment.sha).to eq(job.sha)
+ expect(job.deployment.deployable).to eq(job)
+ expect(job.deployment.deployable_type).to eq('CommitStatus')
+ expect(job.deployment.environment).to eq(job.persisted_environment)
+ end
+
+      context 'when creation failure occurs' do
+ before do
+ allow_next_instance_of(Deployment) do |deployment|
+ allow(deployment).to receive(:save!) { raise ActiveRecord::RecordInvalid }
+ end
+ end
+
+        it 'tracks the exception' do
+ expect { subject }.to raise_error(described_class::DeploymentCreationError)
+
+ expect(Deployment.count).to eq(0)
+ end
+ end
+
+ context 'when the corresponding environment does not exist' do
+ let!(:environment) { }
+
+ it 'does not create a deployment record' do
+ expect { subject }.not_to change { Deployment.count }
+
+ expect(job.deployment).to be_nil
+ end
+ end
+
+ context 'when create_deployment_in_separate_transaction feature flag is disabled' do
+ before do
+ stub_feature_flags(create_deployment_in_separate_transaction: false)
+ end
+
+ it 'does not create a deployment record' do
+ expect { subject }.not_to change { Deployment.count }
+
+ expect(job.deployment).to be_nil
+ end
+ end
+ end
+
+ context 'when a pipeline contains a teardown job' do
+ let!(:job) { build(:ci_build, :stop_review_app, project: project) }
+ let!(:environment) { create(:environment, name: job.expanded_environment_name) }
+
+ it 'does not create a deployment record' do
+ expect { subject }.not_to change { Deployment.count }
+
+ expect(job.deployment).to be_nil
+ end
+ end
+
+ context 'when a pipeline does not contain a deployment job' do
+ let!(:job) { build(:ci_build, project: project) }
+
+ it 'does not create any deployments' do
+ expect { subject }.not_to change { Deployment.count }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
index d60ecc80a6e..4206483b228 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -56,4 +56,74 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
.to include /Failed to persist the pipeline/
end
end
+
+ context 'tags persistence' do
+ let(:stage) do
+ build(:ci_stage_entity, pipeline: pipeline)
+ end
+
+ let(:job) do
+ build(:ci_build, stage: stage, pipeline: pipeline, project: project)
+ end
+
+ let(:bridge) do
+ build(:ci_bridge, stage: stage, pipeline: pipeline, project: project)
+ end
+
+ before do
+ pipeline.stages = [stage]
+ stage.statuses = [job, bridge]
+ end
+
+ context 'without tags' do
+ it 'extracts an empty tag list' do
+ expect(CommitStatus)
+ .to receive(:bulk_insert_tags!)
+ .with(stage.statuses, {})
+ .and_call_original
+
+ step.perform!
+
+ expect(job.instance_variable_defined?(:@tag_list)).to be_falsey
+ expect(job).to be_persisted
+ expect(job.tag_list).to eq([])
+ end
+ end
+
+ context 'with tags' do
+ before do
+ job.tag_list = %w[tag1 tag2]
+ end
+
+ it 'bulk inserts tags' do
+ expect(CommitStatus)
+ .to receive(:bulk_insert_tags!)
+ .with(stage.statuses, { job.name => %w[tag1 tag2] })
+ .and_call_original
+
+ step.perform!
+
+ expect(job.instance_variable_defined?(:@tag_list)).to be_falsey
+ expect(job).to be_persisted
+ expect(job.tag_list).to match_array(%w[tag1 tag2])
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ job.tag_list = %w[tag1 tag2]
+ stub_feature_flags(ci_bulk_insert_tags: false)
+ end
+
+ it 'follows the old code path' do
+ expect(CommitStatus).not_to receive(:bulk_insert_tags!)
+
+ step.perform!
+
+ expect(job.instance_variable_defined?(:@tag_list)).to be_truthy
+ expect(job).to be_persisted
+ expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb
new file mode 100644
index 00000000000..253928e1a19
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::EnsureEnvironments do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) }
+ let(:pipeline) { build(:ci_pipeline, project: project, stages: [stage]) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ describe '#perform!' do
+ subject { step.perform! }
+
+ before do
+ job.pipeline = pipeline
+ end
+
+ context 'when a pipeline contains a deployment job' do
+ let!(:job) { build(:ci_build, :start_review_app, project: project) }
+
+ it 'ensures environment existence for the job' do
+ expect { subject }.to change { Environment.count }.by(1)
+
+ expect(project.environments.find_by_name('review/master')).to be_present
+ expect(job.persisted_environment.name).to eq('review/master')
+ expect(job.metadata.expanded_environment_name).to eq('review/master')
+ end
+
+      context 'when an environment already exists' do
+ before do
+ create(:environment, project: project, name: 'review/master')
+ end
+
+ it 'ensures environment existence for the job' do
+ expect { subject }.not_to change { Environment.count }
+
+ expect(project.environments.find_by_name('review/master')).to be_present
+ expect(job.persisted_environment.name).to eq('review/master')
+ expect(job.metadata.expanded_environment_name).to eq('review/master')
+ end
+ end
+
+ context 'when an environment name contains an invalid character' do
+ let(:pipeline) { build(:ci_pipeline, ref: '!!!', project: project, stages: [stage]) }
+
+ it 'sets the failure status' do
+ expect { subject }.not_to change { Environment.count }
+
+ expect(job).to be_failed
+ expect(job).to be_environment_creation_failure
+ expect(job.persisted_environment).to be_nil
+ end
+ end
+
+ context 'when create_deployment_in_separate_transaction feature flag is disabled' do
+ before do
+ stub_feature_flags(create_deployment_in_separate_transaction: false)
+ end
+
+ it 'does not create any environments' do
+ expect { subject }.not_to change { Environment.count }
+
+ expect(job.persisted_environment).to be_nil
+ end
+ end
+ end
+
+ context 'when a pipeline contains a teardown job' do
+ let!(:job) { build(:ci_build, :stop_review_app, project: project) }
+
+ it 'ensures environment existence for the job' do
+ expect { subject }.to change { Environment.count }.by(1)
+
+ expect(project.environments.find_by_name('review/master')).to be_present
+ expect(job.persisted_environment.name).to eq('review/master')
+ expect(job.metadata.expanded_environment_name).to eq('review/master')
+ end
+ end
+
+ context 'when a pipeline does not contain a deployment job' do
+ let!(:job) { build(:ci_build, project: project) }
+
+ it 'does not create any environments' do
+ expect { subject }.not_to change { Environment.count }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb
new file mode 100644
index 00000000000..87df5a3e21b
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) }
+ let(:pipeline) { build(:ci_pipeline, project: project, stages: [stage]) }
+ let!(:environment) { create(:environment, name: 'production', project: project) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ describe '#perform!' do
+ subject { step.perform! }
+
+ before do
+ job.pipeline = pipeline
+ end
+
+ context 'when a pipeline contains a job that requires a resource group' do
+ let!(:job) do
+ build(:ci_build, project: project, environment: 'production', options: { resource_group_key: '$CI_ENVIRONMENT_NAME' })
+ end
+
+ it 'ensures the resource group existence' do
+ expect { subject }.to change { Ci::ResourceGroup.count }.by(1)
+
+ expect(project.resource_groups.find_by_key('production')).to be_present
+ expect(job.resource_group.key).to eq('production')
+ expect(job.options[:resource_group_key]).to be_nil
+ end
+
+      context 'when a resource group already exists' do
+ before do
+ create(:ci_resource_group, project: project, key: 'production')
+ end
+
+ it 'ensures the resource group existence' do
+ expect { subject }.not_to change { Ci::ResourceGroup.count }
+
+ expect(project.resource_groups.find_by_key('production')).to be_present
+ expect(job.resource_group.key).to eq('production')
+ expect(job.options[:resource_group_key]).to be_nil
+ end
+ end
+
+ context 'when a resource group key contains an invalid character' do
+ let!(:job) do
+ build(:ci_build, project: project, environment: '!!!', options: { resource_group_key: '$CI_ENVIRONMENT_NAME' })
+ end
+
+ it 'does not create any resource groups' do
+ expect { subject }.not_to change { Ci::ResourceGroup.count }
+
+ expect(job.resource_group).to be_nil
+ end
+ end
+
+ context 'when create_deployment_in_separate_transaction feature flag is disabled' do
+ before do
+ stub_feature_flags(create_deployment_in_separate_transaction: false)
+ end
+
+ it 'does not create any resource groups' do
+ expect { subject }.not_to change { Ci::ResourceGroup.count }
+
+ expect(job.resource_group).to be_nil
+ end
+ end
+ end
+
+ context 'when a pipeline does not contain a job that requires a resource group' do
+ let!(:job) { build(:ci_build, project: project) }
+
+ it 'does not create any resource groups' do
+ expect { subject }.not_to change { Ci::ResourceGroup.count }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index cf21c98dbd5..cebc4c02d11 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
second_stage_job_name:
stage: second_stage
services:
+ -
- postgres
before_script:
- echo 'first hello'
@@ -142,6 +143,23 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
perform!
end
+
+ it 'returns expected payload' do
+ expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
+ payload = Gitlab::Json.parse(params[:body])
+
+ builds = payload['builds']
+ expect(builds.count).to eq(2)
+ expect(builds[0]['services']).to be_nil
+ expect(builds[0]['stage']).to eq('first_stage')
+ expect(builds[0]['image']).to eq('hello_world')
+ expect(builds[1]['services']).to eq(['postgres'])
+ expect(builds[1]['stage']).to eq('second_stage')
+ expect(builds[1]['image']).to be_nil
+ end
+
+ perform!
+ end
end
context 'when EXTERNAL_VALIDATION_SERVICE_TOKEN is set' do
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb
index 115674edc48..3e10ca686ba 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb
@@ -17,30 +17,33 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Variable do
end
describe '#evaluate' do
- it 'returns variable value if it is defined' do
- variable = described_class.new('VARIABLE')
+ let(:lexeme) { described_class.new('VARIABLE') }
- expect(variable.evaluate(VARIABLE: 'my variable'))
+ it 'returns variable value if it is defined' do
+ expect(lexeme.evaluate(VARIABLE: 'my variable'))
.to eq 'my variable'
end
it 'allows to use a string as a variable key too' do
- variable = described_class.new('VARIABLE')
-
- expect(variable.evaluate('VARIABLE' => 'my variable'))
+ expect(lexeme.evaluate('VARIABLE' => 'my variable'))
.to eq 'my variable'
end
it 'returns nil if it is not defined' do
- variable = described_class.new('VARIABLE')
-
- expect(variable.evaluate(OTHER: 'variable')).to be_nil
+ expect(lexeme.evaluate('OTHER' => 'variable')).to be_nil
+ expect(lexeme.evaluate(OTHER: 'variable')).to be_nil
end
it 'returns an empty string if it is empty' do
- variable = described_class.new('VARIABLE')
+ expect(lexeme.evaluate('VARIABLE' => '')).to eq ''
+ expect(lexeme.evaluate(VARIABLE: '')).to eq ''
+ end
+
+ it 'does not call with_indifferent_access unnecessarily' do
+ variables_hash = { VARIABLE: 'my variable' }.with_indifferent_access
- expect(variable.evaluate(VARIABLE: '')).to eq ''
+ expect(variables_hash).not_to receive(:with_indifferent_access)
+ expect(lexeme.evaluate(variables_hash)).to eq 'my variable'
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
index ec7eebdc056..84713e2a798 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Statement do
.append(key: 'PATH_VARIABLE', value: 'a/path/variable/value')
.append(key: 'FULL_PATH_VARIABLE', value: '/a/full/path/variable/value')
.append(key: 'EMPTY_VARIABLE', value: '')
+ .to_hash
end
subject do
diff --git a/spec/lib/gitlab/ci/pipeline/logger_spec.rb b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
new file mode 100644
index 00000000000..0b44e35dec1
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
+
+ subject(:logger) { described_class.new(project: project) }
+
+ describe '#log_when' do
+ it 'stores blocks for later evaluation' do
+      logger.log_when { |_obs| true }
+
+ expect(logger.send(:log_conditions).first).to be_a(Proc)
+ end
+ end
+
+ describe '#instrument' do
+ it "returns the block's value" do
+ expect(logger.instrument(:expensive_operation) { 123 }).to eq(123)
+ end
+
+ it 'records durations of instrumented operations' do
+ loggable_data = {
+ 'expensive_operation_duration_s' => {
+ 'count' => 1,
+ 'avg' => a_kind_of(Numeric),
+ 'max' => a_kind_of(Numeric),
+ 'min' => a_kind_of(Numeric)
+ }
+ }
+
+ logger.instrument(:expensive_operation) { 123 }
+ expect(logger.observations_hash).to match(a_hash_including(loggable_data))
+ end
+
+ it 'raises an error when block is not provided' do
+ expect { logger.instrument(:expensive_operation) }
+ .to raise_error(ArgumentError, 'block not given')
+ end
+ end
+
+ describe '#observe' do
+ it 'records durations of observed operations' do
+ loggable_data = {
+ 'pipeline_creation_duration_s' => {
+ 'avg' => 30, 'count' => 1, 'max' => 30, 'min' => 30
+ }
+ }
+
+ expect(logger.observe(:pipeline_creation_duration_s, 30)).to be_truthy
+ expect(logger.observations_hash).to match(a_hash_including(loggable_data))
+ end
+ end
+
+ describe '#commit' do
+ subject(:commit) { logger.commit(pipeline: pipeline, caller: 'source') }
+
+ before do
+ stub_feature_flags(ci_pipeline_creation_logger: flag)
+ allow(logger).to receive(:current_monotonic_time) { Time.current.to_i }
+
+ logger.instrument(:pipeline_save) { travel(60.seconds) }
+ logger.observe(:pipeline_creation_duration_s, 30)
+ logger.observe(:pipeline_creation_duration_s, 10)
+ end
+
+ context 'when the feature flag is enabled' do
+ let(:flag) { true }
+
+ let(:loggable_data) do
+ {
+ 'class' => described_class.name.to_s,
+ 'pipeline_id' => pipeline.id,
+ 'pipeline_persisted' => true,
+ 'project_id' => project.id,
+ 'pipeline_creation_service_duration_s' => a_kind_of(Numeric),
+ 'pipeline_creation_caller' => 'source',
+ 'pipeline_source' => pipeline.source,
+ 'pipeline_save_duration_s' => {
+ 'avg' => 60, 'count' => 1, 'max' => 60, 'min' => 60
+ },
+ 'pipeline_creation_duration_s' => {
+ 'avg' => 20, 'count' => 2, 'max' => 30, 'min' => 10
+ }
+ }
+ end
+
+ it 'logs to application.json' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(commit).to be_truthy
+ end
+
+ context 'with log conditions' do
+ it 'does not log when the conditions are false' do
+ logger.log_when { |_obs| false }
+
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ expect(commit).to be_falsey
+ end
+
+ it 'logs when a condition is true' do
+ logger.log_when { |_obs| true }
+ logger.log_when { |_obs| false }
+
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(commit).to be_truthy
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ let(:flag) { false }
+
+ it 'does not log' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ expect(commit).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index e2b64e65938..68806fbf287 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:root_variables) { [] }
let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
- let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
+ let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage, when: 'on_success' } }
let(:previous_stages) { [] }
let(:current_stage) { double(seeds_names: [attributes[:name]]) }
@@ -61,17 +61,35 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
- context 'with job:rules but no explicit when:' do
- context 'is matched' do
- let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null' }] } }
+ context 'with job: rules but no explicit when:' do
+ let(:base_attributes) { { name: 'rspec', ref: 'master' } }
+
+ context 'with a manual job' do
+ context 'with a matched rule' do
+ let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR == null' }]) }
+
+ it { is_expected.to include(when: 'manual') }
+ end
- it { is_expected.to include(when: 'on_success') }
+ context 'is not matched' do
+ let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR != null' }]) }
+
+ it { is_expected.to include(when: 'never') }
+ end
end
- context 'is not matched' do
- let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR != null' }] } }
+ context 'with an automatic job' do
+ context 'is matched' do
+ let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR == null' }]) }
- it { is_expected.to include(when: 'never') }
+ it { is_expected.to include(when: 'on_success') }
+ end
+
+ context 'is not matched' do
+ let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR != null' }]) }
+
+ it { is_expected.to include(when: 'never') }
+ end
end
end
@@ -393,6 +411,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
describe '#to_resource' do
subject { seed_build.to_resource }
+ before do
+ stub_feature_flags(create_deployment_in_separate_transaction: false)
+ end
+
context 'when job is Ci::Build' do
it { is_expected.to be_a(::Ci::Build) }
it { is_expected.to be_valid }
@@ -443,6 +465,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it_behaves_like 'deployment job'
it_behaves_like 'ensures environment existence'
+ context 'when create_deployment_in_separate_transaction feature flag is enabled' do
+ before do
+ stub_feature_flags(create_deployment_in_separate_transaction: true)
+ end
+
+        it 'does not create any deployments or environments' do
+ expect(subject.deployment).to be_nil
+ expect(Environment.count).to eq(0)
+ expect(Deployment.count).to eq(0)
+ end
+ end
+
context 'when the environment name is invalid' do
let(:attributes) { { name: 'deploy', ref: 'master', environment: '!!!' } }
@@ -452,25 +486,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
expect(subject.metadata.expanded_environment_name).to be_nil
expect(Environment.exists?(name: expected_environment_name)).to eq(false)
end
-
- context 'when surface_environment_creation_failure feature flag is disabled' do
- before do
- stub_feature_flags(surface_environment_creation_failure: false)
- end
-
- it_behaves_like 'non-deployment job'
- it_behaves_like 'ensures environment inexistence'
-
- it 'tracks an exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(an_instance_of(described_class::EnvironmentCreationFailure),
- project_id: project.id,
- reason: %q{Name can contain only letters, digits, '-', '_', '/', '$', '{', '}', '.', and spaces, but it cannot start or end with '/'})
- .once
-
- subject
- end
- end
end
end
@@ -515,6 +530,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'returns a job with resource group' do
expect(subject.resource_group).not_to be_nil
expect(subject.resource_group.key).to eq('iOS')
+ expect(Ci::ResourceGroup.count).to eq(1)
+ end
+
+ context 'when create_deployment_in_separate_transaction feature flag is enabled' do
+ before do
+ stub_feature_flags(create_deployment_in_separate_transaction: true)
+ end
+
+ it 'does not create any resource groups' do
+ expect(subject.resource_group).to be_nil
+ expect(Ci::ResourceGroup.count).to eq(0)
+ end
end
context 'when resource group has $CI_ENVIRONMENT_NAME in it' do
@@ -892,7 +919,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'using rules:' do
using RSpec::Parameterized
- let(:attributes) { { name: 'rspec', rules: rule_set } }
+ let(:attributes) { { name: 'rspec', rules: rule_set, when: 'on_success' } }
context 'with a matching if: rule' do
context 'with an explicit `when: never`' do
diff --git a/spec/lib/gitlab/ci/status/bridge/common_spec.rb b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
index 37524afc83d..30e6ad234a0 100644
--- a/spec/lib/gitlab/ci/status/bridge/common_spec.rb
+++ b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
@@ -29,7 +29,15 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Common do
end
it { expect(subject).to have_details }
- it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" }
+ it { expect(subject.details_path).to include "jobs/#{bridge.id}" }
+
+ context 'with ci_retry_downstream_pipeline ff disabled' do
+ before do
+ stub_feature_flags(ci_retry_downstream_pipeline: false)
+ end
+
+ it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" }
+ end
end
context 'when user does not have access to read downstream pipeline' do
diff --git a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
new file mode 100644
index 00000000000..6c1f56de840
--- /dev/null
+++ b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Tags::BulkInsert do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) }
+ let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) }
+ let_it_be_with_refind(:bridge) { create(:ci_bridge, pipeline: pipeline, project: project) }
+
+ let(:statuses) { [job, bridge, other_job] }
+
+ subject(:service) { described_class.new(statuses, tags_list) }
+
+ describe '#insert!' do
+ context 'without tags' do
+ let(:tags_list) { {} }
+
+ it { expect(service.insert!).to be_falsey }
+ end
+
+ context 'with tags' do
+ let(:tags_list) do
+ {
+ job.name => %w[tag1 tag2],
+ other_job.name => %w[tag2 tag3 tag4]
+ }
+ end
+
+ it 'persists tags' do
+ expect(service.insert!).to be_truthy
+
+ expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
+ expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 10275f33484..5ff34592b2f 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -24,15 +24,5 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
expect(names).to include(*keys)
end
end
-
- context 'feature flag disabled' do
- before do
- stub_feature_flags(ci_predefined_vars_in_builder: false)
- end
-
- it 'returns no variables' do
- expect(subject.map { |env| env[:key] }).to be_empty
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index f00a801286d..e8b38b21ef8 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -2139,7 +2139,7 @@ module Gitlab
end
end
- context 'with when/rules conflict' do
+ context 'with when/rules' do
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:config) do
@@ -2174,7 +2174,7 @@ module Gitlab
}
end
- it_behaves_like 'returns errors', /may not be used with `rules`: when/
+ it { is_expected.to be_valid }
end
context 'used with job-level when:delayed' do
@@ -2190,7 +2190,7 @@ module Gitlab
}
end
- it_behaves_like 'returns errors', /may not be used with `rules`: when, start_in/
+ it_behaves_like 'returns errors', /may not be used with `rules`: start_in/
end
end
diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
index b0f7703462a..f5f02046d4e 100644
--- a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
+++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
@@ -97,6 +97,6 @@ RSpec.describe Gitlab::Cleanup::OrphanLfsFileReferences do
def stub_lfs_pointers(repo, *oids)
expect(repo.gitaly_blob_client)
.to receive(:get_all_lfs_pointers)
- .and_return(oids.map { |oid| OpenStruct.new(lfs_oid: oid) })
+ .and_return(oids.map { |oid| double('pointers', lfs_oid: oid) })
end
end
diff --git a/spec/lib/gitlab/config/entry/undefined_spec.rb b/spec/lib/gitlab/config/entry/undefined_spec.rb
index 36faabd8e31..31e0f9487aa 100644
--- a/spec/lib/gitlab/config/entry/undefined_spec.rb
+++ b/spec/lib/gitlab/config/entry/undefined_spec.rb
@@ -40,4 +40,10 @@ RSpec.describe Gitlab::Config::Entry::Undefined do
expect(entry.specified?).to eq false
end
end
+
+ describe '#type' do
+ it 'returns nil' do
+ expect(entry.type).to eq nil
+ end
+ end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index c0476d38380..56e3fc269e6 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://cdn.example.com")
expect(directives['font_src']).to eq("'self' https://cdn.example.com")
expect(directives['worker_src']).to eq('http://localhost/assets/ blob: data: https://cdn.example.com')
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html")
+ expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html")
end
end
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'does not add CUSTOMER_PORTAL_URL to CSP' do
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html")
+ expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html")
end
end
@@ -123,12 +123,12 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds CUSTOMER_PORTAL_URL to CSP' do
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html")
+ expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html")
end
end
end
- context 'letter_opener applicaiton URL' do
+ context 'letter_opener application URL' do
let(:gitlab_url) { 'http://gitlab.example.com' }
let(:letter_opener_url) { "#{gitlab_url}/rails/letter_opener/" }
@@ -156,6 +156,46 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
end
end
+
+ context 'Snowplow Micro event collector' do
+ let(:snowplow_micro_hostname) { 'localhost:9090' }
+ let(:snowplow_micro_url) { "http://#{snowplow_micro_hostname}/" }
+
+ before do
+ stub_env('SNOWPLOW_MICRO_ENABLE', 1)
+ allow(Gitlab::Tracking).to receive(:collector_hostname).and_return(snowplow_micro_hostname)
+ end
+
+ context 'when in production' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'does not add Snowplow Micro URL to connect-src' do
+ expect(directives['connect_src']).not_to include(snowplow_micro_url)
+ end
+ end
+
+ context 'when in development' do
+ before do
+ stub_rails_env('development')
+ end
+
+ it 'adds Snowplow Micro URL with trailing slash to connect-src' do
+ expect(directives['connect_src']).to match(Regexp.new(snowplow_micro_url))
+ end
+
+ context 'when not enabled using ENV[SNOWPLOW_MICRO_ENABLE]' do
+ before do
+ stub_env('SNOWPLOW_MICRO_ENABLE', nil)
+ end
+
+ it 'does not add Snowplow Micro URL to connect-src' do
+ expect(directives['connect_src']).not_to include(snowplow_micro_url)
+ end
+ end
+ end
+ end
end
describe '#load' do
diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb
index 384609c6664..8a9ab736d46 100644
--- a/spec/lib/gitlab/contributions_calendar_spec.rb
+++ b/spec/lib/gitlab/contributions_calendar_spec.rb
@@ -50,7 +50,8 @@ RSpec.describe Gitlab::ContributionsCalendar do
Event.create!(
project: project,
action: action,
- target: @targets[project],
+ target_type: @targets[project].class.name,
+ target_id: @targets[project].id,
author: contributor,
created_at: DateTime.new(day.year, day.month, day.day, hour)
)
@@ -66,14 +67,34 @@ RSpec.describe Gitlab::ContributionsCalendar do
end
context "when the user has opted-in for private contributions" do
+ before do
+ contributor.update_column(:include_private_contributions, true)
+ end
+
it "shows private and public events to all users" do
- user.update_column(:include_private_contributions, true)
create_event(private_project, today)
create_event(public_project, today)
+ expect(calendar.activity_dates[today]).to eq(2)
+ expect(calendar(user).activity_dates[today]).to eq(2)
+ expect(calendar(contributor).activity_dates[today]).to eq(2)
+ end
+
+ # Regression test for https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74826
+ it "still counts correctly with feature access levels set to private" do
+ create_event(private_project, today)
+
+ private_project.project_feature.update_attribute(:issues_access_level, ProjectFeature::PRIVATE)
+ private_project.project_feature.update_attribute(:repository_access_level, ProjectFeature::PRIVATE)
+ private_project.project_feature.update_attribute(:merge_requests_access_level, ProjectFeature::PRIVATE)
+
expect(calendar.activity_dates[today]).to eq(1)
expect(calendar(user).activity_dates[today]).to eq(1)
- expect(calendar(contributor).activity_dates[today]).to eq(2)
+ expect(calendar(contributor).activity_dates[today]).to eq(1)
+ end
+
+ it "does not fail if there are no contributed projects" do
+ expect(calendar.activity_dates[today]).to eq(nil)
end
end
@@ -125,6 +146,7 @@ RSpec.describe Gitlab::ContributionsCalendar do
create_event(public_project, today, 10)
create_event(public_project, today, 16)
create_event(public_project, today, 23)
+ create_event(public_project, tomorrow, 1)
end
it "renders correct event counts within the UTC timezone" do
@@ -137,14 +159,14 @@ RSpec.describe Gitlab::ContributionsCalendar do
it "renders correct event counts within the Sydney timezone" do
Time.use_zone('UTC') do
contributor.timezone = 'Sydney'
- expect(calendar.activity_dates).to eq(today => 3, tomorrow => 2)
+ expect(calendar.activity_dates).to eq(today => 3, tomorrow => 3)
end
end
it "renders correct event counts within the US Central timezone" do
Time.use_zone('UTC') do
contributor.timezone = 'Central Time (US & Canada)'
- expect(calendar.activity_dates).to eq(yesterday => 2, today => 3)
+ expect(calendar.activity_dates).to eq(yesterday => 2, today => 4)
end
end
end
@@ -169,6 +191,12 @@ RSpec.describe Gitlab::ContributionsCalendar do
expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3)
end
+ it "includes diff notes on merge request" do
+ e1 = create_event(public_project, today, 0, :commented, :diff_note_on_merge_request)
+
+ expect(calendar.events_by_date(today)).to contain_exactly(e1)
+ end
+
context 'when the user cannot read cross project' do
before do
allow(Ability).to receive(:allowed?).and_call_original
diff --git a/spec/lib/gitlab/daemon_spec.rb b/spec/lib/gitlab/daemon_spec.rb
index 075a1e414c7..4d11b0bdc6c 100644
--- a/spec/lib/gitlab/daemon_spec.rb
+++ b/spec/lib/gitlab/daemon_spec.rb
@@ -46,6 +46,30 @@ RSpec.describe Gitlab::Daemon do
expect(subject).to have_received(:run_thread)
end
+
+ context '@synchronous' do
+ context 'when @synchronous is set to true' do
+ subject { described_class.instance(synchronous: true) }
+
+ it 'calls join on the thread' do
+ # Thread has to be run in a block; expect_next_instance_of does not support this.
+ expect_any_instance_of(Thread).to receive(:join) # rubocop:disable RSpec/AnyInstanceOf
+
+ subject.start
+ end
+ end
+
+ context 'when @synchronous is not set to a truthy value' do
+ subject { described_class.instance }
+
+ it 'does not call join on the thread' do
+ # Thread has to be run in a block; expect_next_instance_of does not support this.
+ expect_any_instance_of(Thread).not_to receive(:join) # rubocop:disable RSpec/AnyInstanceOf
+
+ subject.start
+ end
+ end
+ end
end
describe '#stop' do
diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
index b4010d0fe8d..7ad3eb395a9 100644
--- a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
+ include ExclusiveLeaseHelpers
+
describe '#perform' do
subject { described_class.new(async_index) }
@@ -10,7 +12,18 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
- let(:connection) { ApplicationRecord.connection }
+ let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection) { model.connection }
+
+ let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
+ let(:lease_key) { "gitlab/database/async_indexes/index_creator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
context 'when the index already exists' do
before do
@@ -40,7 +53,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
end
it 'skips logic if not able to acquire exclusive lease' do
- expect(subject).to receive(:try_obtain_lease).and_return(false)
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
expect(connection).not_to receive(:execute).with(/CREATE INDEX/)
expect(async_index).not_to receive(:destroy)
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 0182e0f7651..c4364826ee2 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -17,15 +17,19 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
let_it_be(:stuck_job) { create(:batched_background_migration_job, status: :pending, updated_at: fixed_time - described_class::STUCK_JOBS_TIMEOUT) }
let_it_be(:failed_job) { create(:batched_background_migration_job, status: :failed, attempts: 1) }
- before_all do
- create(:batched_background_migration_job, status: :failed, attempts: described_class::MAX_ATTEMPTS)
- create(:batched_background_migration_job, status: :succeeded)
- end
+ let!(:max_attempts_failed_job) { create(:batched_background_migration_job, status: :failed, attempts: described_class::MAX_ATTEMPTS) }
+ let!(:succeeded_job) { create(:batched_background_migration_job, status: :succeeded) }
before do
travel_to fixed_time
end
+ describe '.except_succeeded' do
+ it 'returns jobs that have not succeeded' do
+ expect(described_class.except_succeeded).to contain_exactly(pending_job, running_job, stuck_job, failed_job, max_attempts_failed_job)
+ end
+ end
+
describe '.active' do
it 'returns active jobs' do
expect(described_class.active).to contain_exactly(pending_job, running_job, stuck_job)
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index a1c2634f59c..49714cfc4dd 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -23,6 +23,28 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
subject { build(:batched_background_migration) }
it { is_expected.to validate_uniqueness_of(:job_arguments).scoped_to(:job_class_name, :table_name, :column_name) }
+
+ context 'when there are failed jobs' do
+ let(:batched_migration) { create(:batched_background_migration, status: :active, total_tuple_count: 100) }
+ let!(:batched_job) { create(:batched_background_migration_job, batched_migration: batched_migration, status: :failed) }
+
+ it 'raises an exception' do
+ expect { batched_migration.finished! }.to raise_error(ActiveRecord::RecordInvalid)
+
+ expect(batched_migration.reload.status).to eql 'active'
+ end
+ end
+
+ context 'when the jobs are completed' do
+ let(:batched_migration) { create(:batched_background_migration, status: :active, total_tuple_count: 100) }
+ let!(:batched_job) { create(:batched_background_migration_job, batched_migration: batched_migration, status: :succeeded) }
+
+ it 'finishes the migration' do
+ batched_migration.finished!
+
+ expect(batched_migration.status).to eql 'finished'
+ end
+ end
end
describe '.queue_order' do
@@ -214,14 +236,20 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
- shared_examples_for 'an attr_writer that demodulizes assigned class names' do |attribute_name|
+ shared_examples_for 'an attr_writer that assigns class names' do |attribute_name|
let(:batched_migration) { build(:batched_background_migration) }
context 'when a module name exists' do
- it 'removes the module name' do
+ it 'keeps the class name with its module' do
+ batched_migration.public_send(:"#{attribute_name}=", 'Foo::Bar')
+
+ expect(batched_migration[attribute_name]).to eq('Foo::Bar')
+ end
+
+ it 'removes the leading namespace resolution operator' do
batched_migration.public_send(:"#{attribute_name}=", '::Foo::Bar')
- expect(batched_migration[attribute_name]).to eq('Bar')
+ expect(batched_migration[attribute_name]).to eq('Foo::Bar')
end
end
@@ -271,11 +299,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '#job_class_name=' do
- it_behaves_like 'an attr_writer that demodulizes assigned class names', :job_class_name
+ it_behaves_like 'an attr_writer that assigns class names', :job_class_name
end
describe '#batch_class_name=' do
- it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name
+ it_behaves_like 'an attr_writer that assigns class names', :batch_class_name
end
describe '#migrated_tuple_count' do
diff --git a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
index 9d49db1f018..e7b9c5fcd02 100644
--- a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
+++ b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
@@ -5,24 +5,24 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do
before do
create_list(:project, 3)
- create(:identity)
+ create_list(:ci_instance_variable, 2)
end
subject { described_class.new(models).count }
describe '#count' do
- let(:models) { [Project, Identity] }
+ let(:models) { [Project, Ci::InstanceVariable] }
context 'when reltuples is up to date' do
before do
- ActiveRecord::Base.connection.execute('ANALYZE projects')
- ActiveRecord::Base.connection.execute('ANALYZE identities')
+ Project.connection.execute('ANALYZE projects')
+ Ci::InstanceVariable.connection.execute('ANALYZE ci_instance_variables')
end
it 'uses statistics to do the count' do
models.each { |model| expect(model).not_to receive(:count) }
- expect(subject).to eq({ Project => 3, Identity => 1 })
+ expect(subject).to eq({ Project => 3, Ci::InstanceVariable => 2 })
end
end
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do
before do
models.each do |model|
- ActiveRecord::Base.connection.execute("ANALYZE #{model.table_name}")
+ model.connection.execute("ANALYZE #{model.table_name}")
end
end
@@ -45,7 +45,9 @@ RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do
context 'insufficient permissions' do
it 'returns an empty hash' do
- allow(ActiveRecord::Base).to receive(:transaction).and_raise(PG::InsufficientPrivilege)
+ Gitlab::Database.database_base_models.each_value do |base_model|
+ allow(base_model).to receive(:transaction).and_raise(PG::InsufficientPrivilege)
+ end
expect(subject).to eq({})
end
diff --git a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
index 2f261aebf02..37d3e13a7ab 100644
--- a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
+++ b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do
before do
create_list(:project, 3)
+ create_list(:ci_instance_variable, 2)
create(:identity)
create(:group)
end
- let(:models) { [Project, Identity, Group, Namespace] }
+ let(:models) { [Project, Ci::InstanceVariable, Identity, Group, Namespace] }
let(:strategy) { described_class.new(models) }
subject { strategy.count }
@@ -20,7 +21,8 @@ RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do
Project => threshold + 1,
Identity => threshold - 1,
Group => threshold + 1,
- Namespace => threshold + 1
+ Namespace => threshold + 1,
+ Ci::InstanceVariable => threshold + 1
}
end
@@ -43,12 +45,14 @@ RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do
expect(Project).not_to receive(:count)
expect(Group).not_to receive(:count)
expect(Namespace).not_to receive(:count)
+ expect(Ci::InstanceVariable).not_to receive(:count)
result = subject
expect(result[Project]).to eq(3)
expect(result[Group]).to eq(1)
# 1 group, plus 1 namespace and 1 project namespace for each of the 3 projects
expect(result[Namespace]).to eq(7)
+ expect(result[Ci::InstanceVariable]).to eq(2)
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
index eef248afdf2..796c14c1038 100644
--- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -140,6 +140,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration do
end
describe '#service_discovery_enabled?' do
+ it 'returns false when running inside a Rake task' do
+ allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
+
+ config = described_class.new(ActiveRecord::Base)
+ config.service_discovery[:record] = 'foo'
+
+ expect(config.service_discovery_enabled?).to eq(false)
+ end
+
it 'returns true when a record is configured' do
config = described_class.new(ActiveRecord::Base)
config.service_discovery[:record] = 'foo'
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 37b83729125..3c7819c04b6 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -487,25 +487,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe 'primary connection re-use', :reestablished_active_record_base do
+ describe 'primary connection re-use', :reestablished_active_record_base, :add_ci_connection do
let(:model) { Ci::ApplicationRecord }
- around do |example|
- if Gitlab::Database.has_config?(:ci)
- example.run
- else
- # fake additional Database
- model.establish_connection(
- ActiveRecord::DatabaseConfigurations::HashConfig.new(Rails.env, 'ci', ActiveRecord::Base.connection_db_config.configuration_hash)
- )
-
- example.run
-
- # Cleanup connection_specification_name for Ci::ApplicationRecord
- model.remove_connection
- end
- end
-
describe '#read' do
it 'returns ci replica connection' do
expect { |b| lb.read(&b) }.to yield_with_args do |args|
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
index e9bc465b1c7..f05910e5123 100644
--- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
@@ -4,9 +4,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
let(:load_balancer) do
- Gitlab::Database::LoadBalancing::LoadBalancer.new(
- Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
- )
+ configuration = Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
+ configuration.service_discovery[:record] = 'localhost'
+
+ Gitlab::Database::LoadBalancing::LoadBalancer.new(configuration)
end
let(:service) do
@@ -86,6 +87,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
service.perform_service_discovery
end
end
+
context 'with failures' do
before do
allow(Gitlab::ErrorTracking).to receive(:track_exception)
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index de2ad662d16..31be3963565 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -5,7 +5,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues do
let(:middleware) { described_class.new }
let(:worker) { worker_class.new }
- let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } }
+ let(:location) { '0/D525E3A8' }
+ let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } }
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } }
before do
skip_feature_flags_yaml_validation
@@ -60,9 +62,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
end
shared_examples_for 'replica is up to date' do |expected_strategy|
- let(:location) {'0/D525E3A8' }
- let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } }
-
it 'does not stick to the primary', :aggregate_failures do
expect(ActiveRecord::Base.load_balancer)
.to receive(:select_up_to_date_host)
@@ -77,9 +76,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
include_examples 'load balancing strategy', expected_strategy
end
- shared_examples_for 'sticks based on data consistency' do |data_consistency|
- include_context 'data consistency worker class', data_consistency, :load_balancing_for_test_data_consistency_worker
-
+ shared_examples_for 'sticks based on data consistency' do
context 'when load_balancing_for_test_data_consistency_worker is disabled' do
before do
stub_feature_flags(load_balancing_for_test_data_consistency_worker: false)
@@ -116,23 +113,78 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
it_behaves_like 'replica is up to date', 'replica'
end
- context 'when legacy wal location is set' do
- let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_write_location' => '0/D525E3A8' } }
+ context 'when database location is not set' do
+ let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } }
- before do
- allow(ActiveRecord::Base.load_balancer)
- .to receive(:select_up_to_date_host)
- .with('0/D525E3A8')
- .and_return(true)
- end
+ include_examples 'stick to the primary', 'primary_no_wal'
+ end
+ end
- it_behaves_like 'replica is up to date', 'replica'
+ shared_examples_for 'sleeps when necessary' do
+ context 'when WAL locations are blank', :freeze_time do
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3) } }
+
+ it 'does not sleep' do
+ expect(middleware).not_to receive(:sleep)
+
+ run_middleware
+ end
end
- context 'when database location is not set' do
- let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } }
+ context 'when WAL locations are present', :freeze_time do
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } }
- include_examples 'stick to the primary', 'primary_no_wal'
+ context 'when delay interval has not elapsed' do
+ let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 }
+
+ context 'when replica is up to date' do
+ before do
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ allow(lb).to receive(:select_up_to_date_host).and_return(true)
+ end
+ end
+
+ it 'does not sleep' do
+ expect(middleware).not_to receive(:sleep)
+
+ run_middleware
+ end
+ end
+
+ context 'when replica is not up to date' do
+ before do
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ allow(lb).to receive(:select_up_to_date_host).and_return(false, true)
+ end
+ end
+
+ it 'sleeps until the minimum delay is reached' do
+ expect(middleware).to receive(:sleep).with(be_within(0.01).of(described_class::MINIMUM_DELAY_INTERVAL_SECONDS - elapsed_time))
+
+ run_middleware
+ end
+ end
+ end
+
+ context 'when delay interval has elapsed' do
+ let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS + 0.3 }
+
+ it 'does not sleep' do
+ expect(middleware).not_to receive(:sleep)
+
+ run_middleware
+ end
+ end
+
+ context 'when created_at is in the future' do
+ let(:elapsed_time) { -5 }
+
+ it 'does not sleep' do
+ expect(middleware).not_to receive(:sleep)
+
+ run_middleware
+ end
+ end
end
end
@@ -146,10 +198,24 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker
include_examples 'stick to the primary', 'primary'
+
+ context 'when delay interval has not elapsed', :freeze_time do
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } }
+ let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 }
+
+ it 'does not sleep' do
+ expect(middleware).not_to receive(:sleep)
+
+ run_middleware
+ end
+ end
end
context 'when worker data consistency is :delayed' do
- include_examples 'sticks based on data consistency', :delayed
+ include_context 'data consistency worker class', :delayed, :load_balancing_for_test_data_consistency_worker
+
+ include_examples 'sticks based on data consistency'
+ include_examples 'sleeps when necessary'
context 'when replica is not up to date' do
before do
@@ -177,7 +243,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
end
context 'when job is retried' do
- let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8', 'retry_count' => 0 } }
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, 'retry_count' => 0 } }
context 'and replica still lagging behind' do
include_examples 'stick to the primary', 'primary'
@@ -195,7 +261,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
end
context 'when worker data consistency is :sticky' do
- include_examples 'sticks based on data consistency', :sticky
+ include_context 'data consistency worker class', :sticky, :load_balancing_for_test_data_consistency_worker
+
+ include_examples 'sticks based on data consistency'
+ include_examples 'sleeps when necessary'
context 'when replica is not up to date' do
before do
@@ -255,7 +324,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
end
def run_middleware
- middleware.call(worker, job, double(:queue)) { yield }
+ middleware.call(worker, job, double(:queue)) { yield if block_given? }
rescue described_class::JobReplicaNotUpToDate
# we silence errors here that cause the job to retry
end
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index d88554614cf..f3139bb1b4f 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -256,15 +256,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
expect(sticking.last_write_location_for(:user, 4)).to be_nil
end
-
- it 'removes the old key' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(sticking.send(:old_redis_key_for, :user, 4), 'foo', ex: 30)
- end
-
- sticking.unstick(:user, 4)
- expect(sticking.last_write_location_for(:user, 4)).to be_nil
- end
end
describe '#last_write_location_for' do
@@ -273,14 +264,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
expect(sticking.last_write_location_for(:user, 4)).to eq('foo')
end
-
- it 'falls back to reading the old key' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(sticking.send(:old_redis_key_for, :user, 4), 'foo', ex: 30)
- end
-
- expect(sticking.last_write_location_for(:user, 4)).to eq('foo')
- end
end
describe '#redis_key_for' do
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index 65ffe539910..45878b2e266 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -38,6 +38,24 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
end
+ describe '.primary_only?' do
+ it 'returns true if all load balancers have no replicas' do
+ described_class.each_load_balancer do |lb|
+ allow(lb).to receive(:primary_only?).and_return(true)
+ end
+
+ expect(described_class.primary_only?).to eq(true)
+ end
+
+ it 'returns false if at least one has replicas' do
+ described_class.each_load_balancer.with_index do |lb, index|
+ allow(lb).to receive(:primary_only?).and_return(index != 0)
+ end
+
+ expect(described_class.primary_only?).to eq(false)
+ end
+ end
+
describe '.release_hosts' do
it 'releases the host of every load balancer' do
described_class.each_load_balancer do |lb|
diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
new file mode 100644
index 00000000000..13f2d31bc32
--- /dev/null
+++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LooseForeignKeys do
+ describe 'verify all definitions' do
+ subject(:definitions) { described_class.definitions }
+
+ it 'all definitions have a known gitlab_schema and on_delete assigned' do
+ is_expected.to all(have_attributes(
+ options: a_hash_including(
+ column: be_a(String),
+ gitlab_schema: be_in(Gitlab::Database.schemas_to_base_models.symbolize_keys.keys),
+ on_delete: be_in([:async_delete, :async_nullify])
+ ),
+ from_table: be_a(String),
+ to_table: be_a(String)
+ ))
+ end
+
+ describe 'ensuring database integrity' do
+ def base_models_for(table)
+ parent_table_schema = Gitlab::Database::GitlabSchema.table_schema(table)
+ Gitlab::Database.schemas_to_base_models.fetch(parent_table_schema)
+ end
+
+ it 'all `to_table` tables are present' do
+ definitions.each do |definition|
+ base_models_for(definition.to_table).each do |model|
+ expect(model.connection).to be_table_exist(definition.to_table)
+ end
+ end
+ end
+
+ it 'all `from_table` tables are present' do
+ definitions.each do |definition|
+ base_models_for(definition.from_table).each do |model|
+ expect(model.connection).to be_table_exist(definition.from_table)
+ expect(model.connection).to be_column_exist(definition.from_table, definition.column)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
index f1dbfbbff18..25fc676d09e 100644
--- a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
@@ -47,11 +47,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do
record_to_be_deleted.delete
expect(LooseForeignKeys::DeletedRecord.count).to eq(1)
- deleted_record = LooseForeignKeys::DeletedRecord.all.first
+
+ arel_table = LooseForeignKeys::DeletedRecord.arel_table
+ deleted_record = LooseForeignKeys::DeletedRecord
+ .select(arel_table[Arel.star], arel_table[:partition].as('partition_number')) # aliasing the ignored partition column to partition_number
+ .all
+ .first
expect(deleted_record.primary_key_value).to eq(record_to_be_deleted.id)
expect(deleted_record.fully_qualified_table_name).to eq('public._test_loose_fk_test_table')
- expect(deleted_record.partition).to eq(1)
+ expect(deleted_record.partition_number).to eq(1)
end
it 'stores multiple record deletions' do
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index ea755f5a368..7f80bed04a4 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -2431,7 +2431,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:issues) { table(:issues) }
def setup
- namespace = namespaces.create!(name: 'foo', path: 'foo')
+ namespace = namespaces.create!(name: 'foo', path: 'foo', type: Namespaces::UserNamespace.sti_name)
projects.create!(namespace_id: namespace.id)
end
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index e42a6c970ea..99c7d70724c 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -7,78 +7,6 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
ActiveRecord::Migration.new.extend(described_class)
end
- describe '#bulk_queue_background_migration_jobs_by_range' do
- context 'when the model has an ID column' do
- let!(:id1) { create(:user).id }
- let!(:id2) { create(:user).id }
- let!(:id3) { create(:user).id }
-
- before do
- User.class_eval do
- include EachBatch
- end
- end
-
- context 'with enough rows to bulk queue jobs more than once' do
- before do
- stub_const('Gitlab::Database::Migrations::BackgroundMigrationHelpers::JOB_BUFFER_SIZE', 1)
- end
-
- it 'queues jobs correctly' do
- Sidekiq::Testing.fake! do
- model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
-
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
- end
- end
-
- it 'queues jobs in groups of buffer size 1' do
- expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]]])
- expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id3, id3]]])
-
- model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
- end
- end
-
- context 'with not enough rows to bulk queue jobs more than once' do
- it 'queues jobs correctly' do
- Sidekiq::Testing.fake! do
- model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
-
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
- end
- end
-
- it 'queues jobs in bulk all at once (big buffer size)' do
- expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]],
- ['FooJob', [id3, id3]]])
-
- model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
- end
- end
-
- context 'without specifying batch_size' do
- it 'queues jobs correctly' do
- Sidekiq::Testing.fake! do
- model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob')
-
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3]])
- end
- end
- end
- end
-
- context "when the model doesn't have an ID column" do
- it 'raises error (for now)' do
- expect do
- model.bulk_queue_background_migration_jobs_by_range(ProjectAuthorization, 'FooJob')
- end.to raise_error(StandardError, /does not have an ID/)
- end
- end
- end
-
describe '#queue_background_migration_jobs_by_range_at_intervals' do
context 'when the model has an ID column' do
let!(:id1) { create(:user).id }
@@ -354,161 +282,6 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
end
end
- describe '#queue_batched_background_migration' do
- let(:pgclass_info) { instance_double('Gitlab::Database::PgClass', cardinality_estimate: 42) }
-
- before do
- allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
- end
-
- context 'when such migration already exists' do
- it 'does not create duplicate migration' do
- create(
- :batched_background_migration,
- job_class_name: 'MyJobClass',
- table_name: :projects,
- column_name: :id,
- interval: 10.minutes,
- min_value: 5,
- max_value: 1005,
- batch_class_name: 'MyBatchClass',
- batch_size: 200,
- sub_batch_size: 20,
- job_arguments: [[:id], [:id_convert_to_bigint]]
- )
-
- expect do
- model.queue_batched_background_migration(
- 'MyJobClass',
- :projects,
- :id,
- [:id], [:id_convert_to_bigint],
- job_interval: 5.minutes,
- batch_min_value: 5,
- batch_max_value: 1000,
- batch_class_name: 'MyBatchClass',
- batch_size: 100,
- sub_batch_size: 10)
- end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
- end
- end
-
- it 'creates the database record for the migration' do
- expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
-
- expect do
- model.queue_batched_background_migration(
- 'MyJobClass',
- :projects,
- :id,
- job_interval: 5.minutes,
- batch_min_value: 5,
- batch_max_value: 1000,
- batch_class_name: 'MyBatchClass',
- batch_size: 100,
- sub_batch_size: 10)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
- job_class_name: 'MyJobClass',
- table_name: 'projects',
- column_name: 'id',
- interval: 300,
- min_value: 5,
- max_value: 1000,
- batch_class_name: 'MyBatchClass',
- batch_size: 100,
- sub_batch_size: 10,
- job_arguments: %w[],
- status: 'active',
- total_tuple_count: pgclass_info.cardinality_estimate)
- end
-
- context 'when the job interval is lower than the minimum' do
- let(:minimum_delay) { described_class::BATCH_MIN_DELAY }
-
- it 'sets the job interval to the minimum value' do
- expect do
- model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: minimum_delay - 1.minute)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
-
- expect(created_migration.interval).to eq(minimum_delay)
- end
- end
-
- context 'when additional arguments are passed to the method' do
- it 'saves the arguments on the database record' do
- expect do
- model.queue_batched_background_migration(
- 'MyJobClass',
- :projects,
- :id,
- 'my',
- 'arguments',
- job_interval: 5.minutes,
- batch_max_value: 1000)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
- job_class_name: 'MyJobClass',
- table_name: 'projects',
- column_name: 'id',
- interval: 300,
- min_value: 1,
- max_value: 1000,
- job_arguments: %w[my arguments])
- end
- end
-
- context 'when the max_value is not given' do
- context 'when records exist in the database' do
- let!(:event1) { create(:event) }
- let!(:event2) { create(:event) }
- let!(:event3) { create(:event) }
-
- it 'creates the record with the current max value' do
- expect do
- model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
-
- expect(created_migration.max_value).to eq(event3.id)
- end
-
- it 'creates the record with an active status' do
- expect do
- model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_active
- end
- end
-
- context 'when the database is empty' do
- it 'sets the max value to the min value' do
- expect do
- model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
-
- expect(created_migration.max_value).to eq(created_migration.min_value)
- end
-
- it 'creates the record with a finished status' do
- expect do
- model.queue_batched_background_migration('MyJobClass', :projects, :id, job_interval: 5.minutes)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
-
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished
- end
- end
- end
- end
-
describe '#migrate_async' do
it 'calls BackgroundMigrationWorker.perform_async' do
expect(BackgroundMigrationWorker).to receive(:perform_async).with("Class", "hello", "world")
@@ -583,7 +356,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
end
describe '#finalized_background_migration' do
- let(:job_coordinator) { Gitlab::BackgroundMigration::JobCoordinator.new(:main, BackgroundMigrationWorker) }
+ let(:job_coordinator) { Gitlab::BackgroundMigration::JobCoordinator.new(BackgroundMigrationWorker) }
let!(:job_class_name) { 'TestJob' }
let!(:job_class) { Class.new }
@@ -605,7 +378,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
job_class.define_method(:perform, job_perform_method)
allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database)
- .with(:main).and_return(job_coordinator)
+ .with('main').and_return(job_coordinator)
expect(job_coordinator).to receive(:migration_class_for)
.with(job_class_name).at_least(:once) { job_class }
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
new file mode 100644
index 00000000000..c45149d67bf
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers do
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ describe '#queue_batched_background_migration' do
+ let(:pgclass_info) { instance_double('Gitlab::Database::PgClass', cardinality_estimate: 42) }
+
+ before do
+ allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
+ end
+
+ context 'when such a migration already exists' do
+ it 'does not create duplicate migration' do
+ create(
+ :batched_background_migration,
+ job_class_name: 'MyJobClass',
+ table_name: :projects,
+ column_name: :id,
+ interval: 10.minutes,
+ min_value: 5,
+ max_value: 1005,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 200,
+ sub_batch_size: 20,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
+
+ expect do
+ migration.queue_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ [:id], [:id_convert_to_bigint],
+ job_interval: 5.minutes,
+ batch_min_value: 5,
+ batch_max_value: 1000,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 100,
+ sub_batch_size: 10)
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
+ end
+ end
+
+ it 'creates the database record for the migration' do
+ expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
+
+ expect do
+ migration.queue_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ job_interval: 5.minutes,
+ batch_min_value: 5,
+ batch_max_value: 1000,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 100,
+ sub_batch_size: 10)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
+ job_class_name: 'MyJobClass',
+ table_name: 'projects',
+ column_name: 'id',
+ interval: 300,
+ min_value: 5,
+ max_value: 1000,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 100,
+ sub_batch_size: 10,
+ job_arguments: %w[],
+ status: 'active',
+ total_tuple_count: pgclass_info.cardinality_estimate)
+ end
+
+ context 'when the job interval is lower than the minimum' do
+ let(:minimum_delay) { described_class::BATCH_MIN_DELAY }
+
+ it 'sets the job interval to the minimum value' do
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: minimum_delay - 1.minute)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.interval).to eq(minimum_delay)
+ end
+ end
+
+ context 'when additional arguments are passed to the method' do
+ it 'saves the arguments on the database record' do
+ expect do
+ migration.queue_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ 'my',
+ 'arguments',
+ job_interval: 5.minutes,
+ batch_max_value: 1000)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
+ job_class_name: 'MyJobClass',
+ table_name: 'projects',
+ column_name: 'id',
+ interval: 300,
+ min_value: 1,
+ max_value: 1000,
+ job_arguments: %w[my arguments])
+ end
+ end
+
+ context 'when the max_value is not given' do
+ context 'when records exist in the database' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ it 'creates the record with the current max value' do
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.max_value).to eq(event3.id)
+ end
+
+ it 'creates the record with an active status' do
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_active
+ end
+ end
+
+ context 'when the database is empty' do
+ it 'sets the max value to the min value' do
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.max_value).to eq(created_migration.min_value)
+ end
+
+ it 'creates the record with a finished status' do
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :projects, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index 841d2a98a16..902d8e13a63 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Instrumentation do
let(:result_dir) { Dir.mktmpdir }
+ let(:connection) { ActiveRecord::Migration.connection }
after do
FileUtils.rm_rf(result_dir)
@@ -14,11 +15,11 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
let(:migration_version) { '12345' }
it 'executes the given block' do
- expect { |b| subject.observe(version: migration_version, name: migration_name, &b) }.to yield_control
+ expect { |b| subject.observe(version: migration_version, name: migration_name, connection: connection, &b) }.to yield_control
end
context 'behavior with observers' do
- subject { described_class.new(observer_classes: [Gitlab::Database::Migrations::Observers::MigrationObserver], result_dir: result_dir).observe(version: migration_version, name: migration_name) {} }
+ subject { described_class.new(observer_classes: [Gitlab::Database::Migrations::Observers::MigrationObserver], result_dir: result_dir).observe(version: migration_version, name: migration_name, connection: connection) {} }
let(:observer) { instance_double('Gitlab::Database::Migrations::Observers::MigrationObserver', before: nil, after: nil, record: nil) }
@@ -29,7 +30,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
it 'instantiates observer with observation' do
expect(Gitlab::Database::Migrations::Observers::MigrationObserver)
.to receive(:new)
- .with(instance_of(Gitlab::Database::Migrations::Observation), anything) { |observation| expect(observation.version).to eq(migration_version) }
+ .with(instance_of(Gitlab::Database::Migrations::Observation), anything, connection) { |observation| expect(observation.version).to eq(migration_version) }
.and_return(observer)
subject
@@ -63,7 +64,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'on successful execution' do
- subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name) {} }
+ subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name, connection: connection) {} }
it 'records walltime' do
expect(subject.walltime).not_to be_nil
@@ -83,7 +84,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'upon failure' do
- subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name) { raise 'something went wrong' } }
+ subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name, connection: connection) { raise 'something went wrong' } }
it 'raises the exception' do
expect { subject }.to raise_error(/something went wrong/)
@@ -93,7 +94,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
subject { instance.observations.first }
before do
- instance.observe(version: migration_version, name: migration_name) { raise 'something went wrong' }
+ instance.observe(version: migration_version, name: migration_name, connection: connection) { raise 'something went wrong' }
rescue StandardError
# ignore
end
@@ -125,8 +126,8 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
let(:migration2) { double('migration2', call: nil) }
it 'records observations for all migrations' do
- subject.observe(version: migration_version, name: migration_name) {}
- subject.observe(version: migration_version, name: migration_name) { raise 'something went wrong' } rescue nil
+ subject.observe(version: migration_version, name: migration_name, connection: connection) {}
+ subject.observe(version: migration_version, name: migration_name, connection: connection) { raise 'something went wrong' } rescue nil
expect(subject.observations.size).to eq(2)
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
index 191ac29e3b3..5a19ae6581d 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do
- subject { described_class.new(observation, directory_path) }
+ subject { described_class.new(observation, directory_path, connection) }
+ let(:connection) { ActiveRecord::Migration.connection }
let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) }
- let(:connection) { ActiveRecord::Base.connection }
let(:query) { "select date_trunc('day', $1::timestamptz) + $2 * (interval '1 hour')" }
let(:query_binds) { [Time.current, 3] }
let(:directory_path) { Dir.mktmpdir }
diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
index 2e70a85fd5b..7b01e39f5f1 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
- subject { described_class.new(observation, directory_path) }
+ subject { described_class.new(observation, directory_path, connection) }
let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) }
- let(:connection) { ActiveRecord::Base.connection }
+ let(:connection) { ActiveRecord::Migration.connection }
let(:query) { 'select 1' }
let(:directory_path) { Dir.mktmpdir }
let(:migration_version) { 20210422152437 }
diff --git a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
index 9727a215d71..2515f0d4a06 100644
--- a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do
- subject { described_class.new(observation, double("unused path")) }
+ subject { described_class.new(observation, double("unused path"), connection) }
let(:observation) { Gitlab::Database::Migrations::Observation.new }
- let(:connection) { ActiveRecord::Base.connection }
+ let(:connection) { ActiveRecord::Migration.connection }
def mock_pgss(enabled: true)
if enabled
diff --git a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
index e689759c574..4b08838d6bb 100644
--- a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::TotalDatabaseSizeChange do
- subject { described_class.new(observation, double('unused path')) }
+ subject { described_class.new(observation, double('unused path'), connection) }
let(:observation) { Gitlab::Database::Migrations::Observation.new }
- let(:connection) { ActiveRecord::Base.connection }
+ let(:connection) { ActiveRecord::Migration.connection }
let(:query) { 'select pg_database_size(current_database())' }
it 'records the size change' do
diff --git a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
index e65f89747c4..b26bb8fbe41 100644
--- a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
+++ b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
@@ -2,8 +2,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Observers::TransactionDuration do
- subject(:transaction_duration_observer) { described_class.new(observation, directory_path) }
+ subject(:transaction_duration_observer) { described_class.new(observation, directory_path, connection) }
+ let(:connection) { ActiveRecord::Migration.connection }
let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) }
let(:directory_path) { Dir.mktmpdir }
let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-transaction-duration.json" }
@@ -78,17 +79,17 @@ RSpec.describe Gitlab::Database::Migrations::Observers::TransactionDuration do
end
def run_real_transactions
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
end
end
def run_sub_transactions
- ActiveRecord::Base.transaction(requires_new: true) do
+ ApplicationRecord.transaction(requires_new: true) do
end
end
def run_transaction
- ActiveRecord::Base.connection_pool.with_connection do |connection|
+ ApplicationRecord.connection_pool.with_connection do |connection|
Gitlab::Database::SharedModel.using_connection(connection) do
Gitlab::Database::SharedModel.transaction do
Gitlab::Database::SharedModel.transaction(requires_new: true) do
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index 52fb5ec2ba8..4616bd6941e 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
it 'runs the unapplied migrations in version order', :aggregate_failures do
up.run
- expect(migration_runs.map(&:dir)).to eq([:up, :up])
+ expect(migration_runs.map(&:dir)).to match_array([:up, :up])
expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version))
end
end
@@ -101,7 +101,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
it 'runs the applied migrations for the current branch in reverse order', :aggregate_failures do
down.run
- expect(migration_runs.map(&:dir)).to eq([:down, :down])
+ expect(migration_runs.map(&:dir)).to match_array([:down, :down])
expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version))
end
end
diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
index b2c4e4b54a4..2ef873e8adb 100644
--- a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
@@ -90,18 +90,6 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
expect(table_oid('test_partition')).to be_nil
end
- context 'when the drop_detached_partitions feature flag is disabled' do
- before do
- stub_feature_flags(drop_detached_partitions: false)
- end
-
- it 'does not drop the partition' do
- dropper.perform
-
- expect(table_oid('test_partition')).not_to be_nil
- end
- end
-
context 'removing foreign keys' do
it 'removes foreign keys from the table before dropping it' do
expect(dropper).to receive(:drop_detached_partition).and_wrap_original do |drop_method, partition_name|
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index 1c6f5c5c694..5e107109fc9 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
subject(:sync_partitions) { described_class.new(model).sync_partitions }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
- let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: []) }
+ let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "some_table" }
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:manager) { described_class.new(model) }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
- let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: []) }
+ let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "foo" }
@@ -101,28 +101,10 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
]
end
- context 'with the partition_pruning feature flag enabled' do
- before do
- stub_feature_flags(partition_pruning: true)
- end
-
- it 'detaches each extra partition' do
- extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) }
-
- sync_partitions
- end
- end
+ it 'detaches each extra partition' do
+ extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) }
- context 'with the partition_pruning feature flag disabled' do
- before do
- stub_feature_flags(partition_pruning: false)
- end
-
- it 'returns immediately' do
- expect(manager).not_to receive(:detach)
-
- sync_partitions
- end
+ sync_partitions
end
end
diff --git a/spec/lib/gitlab/database/partitioning/single_numeric_list_partition_spec.rb b/spec/lib/gitlab/database/partitioning/single_numeric_list_partition_spec.rb
new file mode 100644
index 00000000000..9941241e846
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/single_numeric_list_partition_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::SingleNumericListPartition do
+ describe '.from_sql' do
+ subject(:parsed_partition) { described_class.from_sql(table, partition_name, definition) }
+
+ let(:table) { 'partitioned_table' }
+ let(:partition_value) { 0 }
+ let(:partition_name) { "partitioned_table_#{partition_value}" }
+ let(:definition) { "FOR VALUES IN ('#{partition_value}')" }
+
+ it 'uses specified table name' do
+ expect(parsed_partition.table).to eq(table)
+ end
+
+ it 'uses specified partition name' do
+ expect(parsed_partition.partition_name).to eq(partition_name)
+ end
+
+ it 'parses the definition' do
+ expect(parsed_partition.value).to eq(partition_value)
+ end
+ end
+
+ describe '#partition_name' do
+ it 'is the explicit name if provided' do
+ expect(described_class.new('table', 1, partition_name: 'some_other_name').partition_name).to eq('some_other_name')
+ end
+
+ it 'defaults to the table name followed by the partition value' do
+ expect(described_class.new('table', 1).partition_name).to eq('table_1')
+ end
+ end
+
+ context 'sorting' do
+ it 'is incomparable if the tables do not match' do
+ expect(described_class.new('table1', 1) <=> described_class.new('table2', 2)).to be_nil
+ end
+
+ it 'sorts by the value when the tables match' do
+ expect(described_class.new('table1', 1) <=> described_class.new('table1', 2)).to eq(1 <=> 2)
+ end
+
+ it 'sorts by numeric value rather than text value' do
+ expect(described_class.new('table', 10)).to be > described_class.new('table', 9)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
new file mode 100644
index 00000000000..636a09e5710
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -0,0 +1,214 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:table_name) { :_test_partitioned_test }
+ let(:model) { double('model', table_name: table_name, ignored_columns: %w[partition]) }
+ let(:next_partition_if) { double('next_partition_if') }
+ let(:detach_partition_if) { double('detach_partition_if') }
+
+ subject(:strategy) do
+ described_class.new(model, :partition,
+ next_partition_if: next_partition_if,
+ detach_partition_if: detach_partition_if)
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ create table #{table_name}
+ (
+ id serial not null,
+ partition bigint not null default 2,
+ created_at timestamptz not null,
+ primary key (id, partition)
+ )
+ partition by list(partition);
+
+ create table #{table_name}_1
+ partition of #{table_name} for values in (1);
+
+ create table #{table_name}_2
+ partition of #{table_name} for values in (2);
+ SQL
+ end
+
+ describe '#current_partitions' do
+ it 'detects both partitions' do
+ expect(strategy.current_partitions).to eq([
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 1, partition_name: '_test_partitioned_test_1'),
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 2, partition_name: '_test_partitioned_test_2')
+ ])
+ end
+ end
+
+ describe '#active_partition' do
+ it 'is the partition with the largest value' do
+ expect(strategy.active_partition.value).to eq(2)
+ end
+ end
+
+ describe '#missing_partitions' do
+ context 'when next_partition_if returns true' do
+ let(:next_partition_if) { proc { true } }
+
+ it 'is a partition definition for the next partition in the series' do
+ extra = strategy.missing_partitions
+
+ expect(extra.length).to eq(1)
+ expect(extra.first.value).to eq(3)
+ end
+ end
+
+ context 'when next_partition_if returns false' do
+ let(:next_partition_if) { proc { false } }
+
+ it 'is empty' do
+ expect(strategy.missing_partitions).to be_empty
+ end
+ end
+
+ context 'when there are no partitions for the table' do
+ it 'returns a partition for value 1' do
+ connection.execute("drop table #{table_name}_1; drop table #{table_name}_2;")
+
+ missing_partitions = strategy.missing_partitions
+
+ expect(missing_partitions.size).to eq(1)
+ missing_partition = missing_partitions.first
+
+ expect(missing_partition.value).to eq(1)
+ end
+ end
+ end
+
+ describe '#extra_partitions' do
+ before do
+ (3..10).each do |i|
+ connection.execute("CREATE TABLE #{table_name}_#{i} PARTITION OF #{table_name} FOR VALUES IN (#{i})")
+ end
+ end
+
+ context 'when some partitions are true for detach_partition_if' do
+ let(:detach_partition_if) { ->(p) { p != 5 } }
+
+ it 'is the leading set of partitions before that value' do
+ expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4)
+ end
+ end
+
+ context 'when all partitions are true for detach_partition_if' do
+ let(:detach_partition_if) { proc { true } }
+
+ it 'is all but the most recent partition', :aggregate_failures do
+ expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4, 5, 6, 7, 8, 9)
+
+ expect(strategy.current_partitions.map(&:value).max).to eq(10)
+ end
+ end
+ end
+
+ describe '#initial_partition' do
+ it 'starts with the value 1', :aggregate_failures do
+ initial_partition = strategy.initial_partition
+ expect(initial_partition.value).to eq(1)
+ expect(initial_partition.table).to eq(strategy.table_name)
+ expect(initial_partition.partition_name).to eq("#{strategy.table_name}_1")
+ end
+ end
+
+ describe '#next_partition' do
+ it 'is one after the active partition', :aggregate_failures do
+ expect(strategy).to receive(:active_partition).and_return(double(value: 5))
+ next_partition = strategy.next_partition
+
+ expect(next_partition.value).to eq(6)
+ expect(next_partition.table).to eq(strategy.table_name)
+ expect(next_partition.partition_name).to eq("#{strategy.table_name}_6")
+ end
+ end
+
+ describe '#ensure_partitioning_column_ignored!' do
+ it 'raises when the column is not ignored' do
+ expect do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ partitioned_by :partition, strategy: :sliding_list,
+ next_partition_if: proc { false },
+ detach_partition_if: proc { false }
+ end
+ end.to raise_error(/ignored_columns/)
+ end
+
+ it 'does not raise when the column is ignored' do
+ expect do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.ignored_columns = [:partition]
+
+ partitioned_by :partition, strategy: :sliding_list,
+ next_partition_if: proc { false },
+ detach_partition_if: proc { false }
+ end
+ end.not_to raise_error
+ end
+ end
+ context 'redirecting inserts as the active partition changes' do
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = '_test_partitioned_test'
+ self.primary_key = :id
+
+ self.ignored_columns = %w[partition]
+
+ # method().call cannot be detected by rspec, so we add a layer of indirection here
+ def self.next_partition_if_wrapper(...)
+ next_partition?(...)
+ end
+
+ def self.detach_partition_if_wrapper(...)
+ detach_partition?(...)
+ end
+ partitioned_by :partition, strategy: :sliding_list,
+ next_partition_if: method(:next_partition_if_wrapper),
+ detach_partition_if: method(:detach_partition_if_wrapper)
+
+ def self.next_partition?(current_partition)
+ end
+
+ def self.detach_partition?(partition)
+ end
+ end
+ end
+
+ it 'redirects to the new partition', :aggregate_failures do
+ partition_2_model = model.create! # Goes in partition 2
+
+ allow(model).to receive(:next_partition?) do
+ model.partitioning_strategy.active_partition.value < 3
+ end
+
+ allow(model).to receive(:detach_partition?).and_return(false)
+
+ Gitlab::Database::Partitioning::PartitionManager.new(model).sync_partitions
+
+ partition_3_model = model.create!
+
+ # Rails doesn't pick up on database default changes, so we need to reload
+ # We also want to grab the partition column to verify what it was set to.
+ # In normal operation we make rails ignore it so that we can use a changing default
+ # So we force select * to load it
+ all_columns = model.select(model.arel_table[Arel.star])
+ partition_2_model = all_columns.find(partition_2_model.id)
+ partition_3_model = all_columns.find(partition_3_model.id)
+
+ expect(partition_2_model.partition).to eq(2)
+ expect(partition_3_model.partition).to eq(3)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb
index 82a1c7143d5..34c72893c53 100644
--- a/spec/lib/gitlab/database/query_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzer_spec.rb
@@ -128,11 +128,20 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
it 'does not call analyze on suppressed analyzers' do
expect(analyzer).to receive(:suppressed?).and_return(true)
+ expect(analyzer).to receive(:requires_tracking?).and_return(false)
expect(analyzer).not_to receive(:analyze)
expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error
end
+ it 'does call analyze on suppressed analyzers if some queries require tracking' do
+ expect(analyzer).to receive(:suppressed?).and_return(true)
+ expect(analyzer).to receive(:requires_tracking?).and_return(true)
+ expect(analyzer).to receive(:analyze)
+
+ expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error
+ end
+
def process_sql(sql)
described_class.instance.within do
ApplicationRecord.load_balancer.read_write do |connection|
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index ab5f05e3ec4..86e74cf5177 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
process_sql(ActiveRecord::Base, "SELECT 1 FROM projects")
end
- context 'properly observes all queries', :mocked_ci_connection do
+ context 'properly observes all queries', :add_ci_connection do
using RSpec::Parameterized::TableSyntax
where do
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
index eb8ccb0bd89..c41b4eeea10 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
@@ -92,6 +92,23 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
end
end
+
+ context 'when comments are added to the front of query strings' do
+ around do |example|
+ prepend_comment_was = Marginalia::Comment.prepend_comment
+ Marginalia::Comment.prepend_comment = true
+
+ example.run
+
+ Marginalia::Comment.prepend_comment = prepend_comment_was
+ end
+
+ it 'raises error' do
+ Project.transaction do
+ expect { run_queries }.to raise_error /Cross-database data modification/
+ end
+ end
+ end
end
context 'when executing a SELECT FOR UPDATE query' do
@@ -164,4 +181,49 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/
end
end
+
+ context 'when execution is rescued with StandardError' do
+ it 'raises cross-database data modification exception' do
+ expect do
+ Project.transaction do
+ project.touch
+ project.connection.execute('UPDATE foo_bars_undefined_table SET a=1 WHERE id = -1')
+ end
+ rescue StandardError
+ # Ensures that standard rescue does not silence errors
+ end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/
+ end
+ end
+
+ context 'when uniqueness validation is tested', type: :model do
+ subject { build(:ci_variable) }
+
+ it 'does not raise exceptions' do
+ expect do
+ is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope).with_message(/\(\w+\) has already been taken/)
+ end.not_to raise_error
+ end
+ end
+
+ context 'when doing rollback in a suppressed block' do
+ it 'does not raise misaligned transactions exception' do
+ expect do
+ # This is a non-materialised transaction:
+ # 1. the transaction will be open on a write (project.touch) (in a suppressed block)
+ # 2. the rescue will be handled outside of suppressed block
+ #
+ # This will create misaligned boundaries since BEGIN
+ # of transaction will be executed within a suppressed block
+ Project.transaction do
+ described_class.with_suppressed do
+ project.touch
+
+ raise 'force rollback'
+ end
+
+ # the ensure of `.transaction` executes `ROLLBACK TO SAVEPOINT`
+ end
+ end.to raise_error /force rollback/
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
index 085fd3061ad..0afbe46b7f1 100644
--- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -15,10 +15,18 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
let(:action) { create(:reindex_action, index: index) }
let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
- let(:lease_key) { 'gitlab/database/reindexing/coordinator' }
+ let(:lease_key) { "gitlab/database/reindexing/coordinator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
let(:lease_timeout) { 1.day }
let(:uuid) { 'uuid' }
+ around do |example|
+ model = Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME]
+
+ Gitlab::Database::SharedModel.using_connection(model.connection) do
+ example.run
+ end
+ end
+
before do
swapout_view_for_table(:postgres_indexes)
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 13aff343432..0c576505e07 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -6,6 +6,63 @@ RSpec.describe Gitlab::Database::Reindexing do
include ExclusiveLeaseHelpers
include Database::DatabaseHelpers
+ describe '.invoke' do
+ let(:databases) { Gitlab::Database.database_base_models }
+ let(:databases_count) { databases.count }
+
+ it 'cleans up any leftover indexes' do
+ expect(described_class).to receive(:cleanup_leftovers!).exactly(databases_count).times
+
+ described_class.invoke
+ end
+
+ context 'when there is an error raised' do
+ it 'logs and re-raises' do
+ expect(described_class).to receive(:automatic_reindexing).and_raise('Unexpected!')
+ expect(Gitlab::AppLogger).to receive(:error)
+
+ expect { described_class.invoke }.to raise_error('Unexpected!')
+ end
+ end
+
+ context 'when async index creation is enabled' do
+ it 'executes async index creation prior to any reindexing actions' do
+ stub_feature_flags(database_async_index_creation: true)
+
+ expect(Gitlab::Database::AsyncIndexes).to receive(:create_pending_indexes!).ordered.exactly(databases_count).times
+ expect(described_class).to receive(:automatic_reindexing).ordered.exactly(databases_count).times
+
+ described_class.invoke
+ end
+ end
+
+ context 'when async index creation is disabled' do
+ it 'does not execute async index creation' do
+ stub_feature_flags(database_async_index_creation: false)
+
+ expect(Gitlab::Database::AsyncIndexes).not_to receive(:create_pending_indexes!)
+
+ described_class.invoke
+ end
+ end
+
+ context 'calls automatic reindexing' do
+ it 'uses all candidate indexes' do
+ expect(described_class).to receive(:automatic_reindexing).exactly(databases_count).times
+
+ described_class.invoke
+ end
+
+ context 'when explicit database is given' do
+ it 'skips other databases' do
+ expect(described_class).to receive(:automatic_reindexing).once
+
+ described_class.invoke(Gitlab::Database::PRIMARY_DATABASE_NAME)
+ end
+ end
+ end
+ end
+
describe '.automatic_reindexing' do
subject { described_class.automatic_reindexing(maximum_records: limit) }
@@ -133,10 +190,19 @@ RSpec.describe Gitlab::Database::Reindexing do
end
describe '.cleanup_leftovers!' do
- subject { described_class.cleanup_leftovers! }
+ subject(:cleanup_leftovers) { described_class.cleanup_leftovers! }
+
+ let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection) { model.connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
before do
- ApplicationRecord.connection.execute(<<~SQL)
+ connection.execute(<<~SQL)
CREATE INDEX foobar_ccnew ON users (id);
CREATE INDEX foobar_ccnew1 ON users (id);
SQL
@@ -150,11 +216,11 @@ RSpec.describe Gitlab::Database::Reindexing do
expect_query("DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"foobar_ccnew1\"")
expect_query("RESET idle_in_transaction_session_timeout; RESET lock_timeout")
- subject
+ cleanup_leftovers
end
def expect_query(sql)
- expect(ApplicationRecord.connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql|
+ expect(connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql|
method.call(sql.sub(/CONCURRENTLY/, ''))
end
end
diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb
index 94f2b5a3434..54af4a0c4dc 100644
--- a/spec/lib/gitlab/database/shared_model_spec.rb
+++ b/spec/lib/gitlab/database/shared_model_spec.rb
@@ -84,4 +84,16 @@ RSpec.describe Gitlab::Database::SharedModel do
expect(described_class.connection).to be(original_connection)
end
end
+
+ describe '#connection_db_config' do
+ it 'returns the class connection_db_config' do
+ shared_model_class = Class.new(described_class) do
+ self.table_name = 'postgres_async_indexes'
+ end
+
+ shared_model = shared_model_class.new
+
+ expect(shared_model.connection_db_config).to eq(described_class.connection_db_config)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/type/json_pg_safe_spec.rb b/spec/lib/gitlab/database/type/json_pg_safe_spec.rb
new file mode 100644
index 00000000000..91dc6f39aa7
--- /dev/null
+++ b/spec/lib/gitlab/database/type/json_pg_safe_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Type::JsonPgSafe do
+ let(:type) { described_class.new }
+
+ describe '#serialize' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { type.serialize(value) }
+
+ where(:value, :json) do
+ nil | nil
+ 1 | '1'
+ 1.0 | '1.0'
+ "str\0ing\u0000" | '"string"'
+ ["\0arr", "a\u0000y"] | '["arr","ay"]'
+ { "key\0" => "value\u0000\0" } | '{"key":"value"}'
+ end
+
+ with_them do
+ it { is_expected.to eq(json) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/custom_diff_spec.rb b/spec/lib/gitlab/diff/custom_diff_spec.rb
new file mode 100644
index 00000000000..246508d2e1e
--- /dev/null
+++ b/spec/lib/gitlab/diff/custom_diff_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::CustomDiff do
+ include RepoHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:ipynb_blob) { repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') }
+ let(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') }
+
+ describe '#preprocess_before_diff' do
+ context 'for ipynb files' do
+ it 'transforms the diff' do
+ expect(described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)).not_to include('cells')
+ end
+
+ it 'adds the blob to the list of transformed blobs' do
+ described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
+
+ expect(described_class.transformed_for_diff?(ipynb_blob)).to be_truthy
+ end
+ end
+
+ context 'for other files' do
+ it 'returns nil' do
+ expect(described_class.preprocess_before_diff(blob.path, nil, blob)).to be_nil
+ end
+
+ it 'does not add the blob to the list of transformed blobs' do
+ described_class.preprocess_before_diff(blob.path, nil, blob)
+
+ expect(described_class.transformed_for_diff?(blob)).to be_falsey
+ end
+ end
+ end
+
+ describe '#transformed_blob_data' do
+ it 'transforms blob data if file was processed' do
+ described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
+
+ expect(described_class.transformed_blob_data(ipynb_blob)).not_to include('cells')
+ end
+
+ it 'does not transform blob data if file was not processed' do
+ expect(described_class.transformed_blob_data(ipynb_blob)).to be_nil
+ end
+ end
+
+ describe '#transformed_blob_language' do
+ it 'is md when file was preprocessed' do
+ described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
+
+ expect(described_class.transformed_blob_language(ipynb_blob)).to eq('md')
+ end
+
+ it 'is nil for a .ipynb blob that was not preprocessed' do
+ expect(described_class.transformed_blob_language(ipynb_blob)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 4b437397688..45a49a36fe2 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::Diff::File do
describe 'initialize' do
context 'when file is ipynb with a change after transformation' do
- let(:commit) { project.commit("f6b7a707") }
+ let(:commit) { project.commit("532c837") }
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::Diff::File do
end
it 'recreates the diff by transforming the files' do
- expect(diff_file.diff.diff).not_to include('"| Fake')
+ expect(diff_file.diff.diff).not_to include('cell_type')
end
end
@@ -73,7 +73,7 @@ RSpec.describe Gitlab::Diff::File do
end
it 'does not recreate the diff' do
- expect(diff_file.diff.diff).to include('"| Fake')
+ expect(diff_file.diff.diff).to include('cell_type')
end
end
end
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index 94b28c38fa2..624160d2f48 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -151,20 +151,6 @@ RSpec.describe Gitlab::Diff::Highlight do
expect(subject[2].rich_text).to eq(%Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n})
expect(subject[2].rich_text).to be_html_safe
end
-
- context 'when limited_diff_highlighting is disabled' do
- before do
- stub_feature_flags(limited_diff_highlighting: false)
- stub_feature_flags(diff_line_syntax_highlighting: false)
- end
-
- it 'blobs are highlighted as plain text with loading all data' do
- expect(diff_file.blob).to receive(:load_all_data!).twice
-
- code = %Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n}
- expect(subject[2].rich_text).to eq(code)
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index bd4f1d164a8..10098a66ae9 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -101,6 +101,17 @@ RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do
end
end
+ context 'when all lines of email are quotes' do
+ let(:email_raw) { email_fixture('emails/valid_new_issue_with_only_quotes.eml') }
+
+ it 'creates email with correct body' do
+ receiver.execute
+
+ issue = Issue.last
+ expect(issue.description).to include('This email has been forwarded without new content.')
+ end
+ end
+
context "something is wrong" do
context "when the issue could not be saved" do
before do
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index c579027788d..7c34fb1a926 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
let(:email_raw) { email_fixture('emails/service_desk.eml') }
let(:author_email) { 'jake@adventuretime.ooo' }
+ let(:message_id) { 'CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com' }
+
let_it_be(:group) { create(:group, :private, name: "email") }
let(:expected_description) do
@@ -40,6 +42,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect(new_issue.all_references.all).to be_empty
expect(new_issue.title).to eq("The message subject! @all")
expect(new_issue.description).to eq(expected_description.strip)
+ expect(new_issue.email&.email_message_id).to eq(message_id)
end
it 'creates an issue_email_participant' do
@@ -72,6 +75,95 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
it_behaves_like 'a new issue request'
end
+ context 'when replying to issue creation email' do
+ def receive_reply
+ reply_email_raw = email_fixture('emails/service_desk_reply.eml')
+
+ second_receiver = Gitlab::Email::Receiver.new(reply_email_raw)
+ second_receiver.execute
+ end
+
+ context 'when an issue with message_id has been found' do
+ before do
+ receiver.execute
+ end
+
+ subject do
+ receive_reply
+ end
+
+ it 'does not create an additional issue' do
+ expect { subject }.not_to change { Issue.count }
+ end
+
+ it 'adds a comment to the created issue' do
+ subject
+
+ notes = Issue.last.notes
+ new_note = notes.first
+
+ expect(notes.count).to eq(1)
+ expect(new_note.note).to eq("Service desk reply!\n\n`/label ~label2`")
+ expect(new_note.author).to eql(User.support_bot)
+ end
+
+ it 'does not send thank you email' do
+ expect(Notify).not_to receive(:service_desk_thank_you_email)
+
+ subject
+ end
+
+ context 'when issue_email_participants FF is enabled' do
+ it 'creates 2 issue_email_participants' do
+ subject
+
+ expect(Issue.last.issue_email_participants.map(&:email))
+ .to match_array(%w(alan@adventuretime.ooo jake@adventuretime.ooo))
+ end
+ end
+
+ context 'when issue_email_participants FF is disabled' do
+ before do
+ stub_feature_flags(issue_email_participants: false)
+ end
+
+ it 'creates only 1 issue_email_participant' do
+ subject
+
+ expect(Issue.last.issue_email_participants.map(&:email))
+ .to match_array(%w(jake@adventuretime.ooo))
+ end
+ end
+ end
+
+ context 'when an issue with message_id has not been found' do
+ subject do
+ receive_reply
+ end
+
+ it 'creates a new issue correctly' do
+ expect { subject }.to change { Issue.count }.by(1)
+
+ issue = Issue.last
+
+ expect(issue.description).to eq("Service desk reply!\n\n`/label ~label2`")
+ end
+
+ it 'sends thank you email once' do
+ expect(Notify).to receive(:service_desk_thank_you_email).once.and_return(double(deliver_later: true))
+
+ subject
+ end
+
+ it 'creates 1 issue_email_participant' do
+ subject
+
+ expect(Issue.last.issue_email_participants.map(&:email))
+ .to match_array(%w(alan@adventuretime.ooo))
+ end
+ end
+ end
+
context 'when using issue templates' do
let_it_be(:user) { create(:user) }
@@ -270,6 +362,20 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
end
+ context 'when issue email creation fails' do
+ before do
+ allow(::Issue::Email).to receive(:create!).and_raise(StandardError)
+ end
+
+ it 'still creates a new issue' do
+ expect { receiver.execute }.to change { Issue.count }.by(1)
+ end
+
+ it 'does not create issue email record' do
+ expect { receiver.execute }.not_to change { Issue::Email.count }
+ end
+ end
+
context 'when rate limiting is in effect', :freeze_time, :clean_gitlab_redis_rate_limiting do
let(:receiver) { Gitlab::Email::Receiver.new(email_raw) }
@@ -291,19 +397,19 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
rescue RateLimitedService::RateLimitedError
end.to change { Issue.count }.by(1)
end
+ end
- context 'when requests are sent by different users' do
- let(:email_raw_2) { email_fixture('emails/service_desk_forwarded.eml') }
- let(:receiver2) { Gitlab::Email::Receiver.new(email_raw_2) }
+ context 'when requests are sent by different users' do
+ let(:email_raw_2) { email_fixture('emails/service_desk_forwarded.eml') }
+ let(:receiver2) { Gitlab::Email::Receiver.new(email_raw_2) }
- subject do
- receiver.execute
- receiver2.execute
- end
+ subject do
+ receiver.execute
+ receiver2.execute
+ end
- it 'creates 2 issues' do
- expect { subject }.to change { Issue.count }.by(2)
- end
+ it 'creates 2 issues' do
+ expect { subject }.to change { Issue.count }.by(2)
end
end
@@ -389,6 +495,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when the email is forwarded through an alias' do
let(:author_email) { 'jake.g@adventuretime.ooo' }
let(:email_raw) { email_fixture('emails/service_desk_forwarded.eml') }
+ let(:message_id) { 'CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=fdskbsf@mail.gmail.com' }
it_behaves_like 'a new issue request'
end
diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
index 352eb596cd9..7dd4ee7e25d 100644
--- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
+++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
@@ -50,6 +50,7 @@ RSpec.describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
expect(mail.header['To'].value).to eq('test@example.com')
expect(mail.header['From'].value).to eq('info@example.com')
expect(mail.header['Content-Type'].value).to match('multipart/signed').and match('protocol="application/x-pkcs7-signature"')
+ expect(mail.header.include?('Content-Disposition')).to eq(false)
# verify signature and obtain pkcs7 encoded content
p7enc = Gitlab::Email::Smime::Signer.verify_signature(
diff --git a/spec/lib/gitlab/email/service_desk_receiver_spec.rb b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
index 6ba58ad5e93..49cbec6fffc 100644
--- a/spec/lib/gitlab/email/service_desk_receiver_spec.rb
+++ b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
@@ -9,9 +9,7 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
context 'when the email contains a valid email address' do
before do
stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
- end
- it 'finds the service desk key' do
handler = double(execute: true, metrics_event: true, metrics_params: true)
expected_params = [
an_instance_of(Mail::Message), nil,
@@ -20,8 +18,38 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
expect(Gitlab::Email::Handler::ServiceDeskHandler)
.to receive(:new).with(*expected_params).and_return(handler)
+ end
+
+ context 'when in a To header' do
+ it 'finds the service desk key' do
+ receiver.execute
+ end
+ end
+
+ context 'when the email contains a valid email address in a header' do
+ context 'when in a Delivered-To header' do
+ let(:email) { fixture_file('emails/service_desk_custom_address_reply.eml') }
+
+ it 'finds the service desk key' do
+ receiver.execute
+ end
+ end
+
+ context 'when in an Envelope-To header' do
+ let(:email) { fixture_file('emails/service_desk_custom_address_envelope_to.eml') }
+
+ it 'finds the service desk key' do
+ receiver.execute
+ end
+ end
+
+ context 'when in an X-Envelope-To header' do
+ let(:email) { fixture_file('emails/service_desk_custom_address_x_envelope_to.eml') }
- receiver.execute
+ it 'finds the service desk key' do
+ receiver.execute
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/empty_search_results_spec.rb b/spec/lib/gitlab/empty_search_results_spec.rb
new file mode 100644
index 00000000000..e79586bef68
--- /dev/null
+++ b/spec/lib/gitlab/empty_search_results_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EmptySearchResults do
+ subject { described_class.new }
+
+ describe '#objects' do
+ it 'returns an empty array' do
+ expect(subject.objects).to match_array([])
+ end
+ end
+
+ describe '#formatted_count' do
+ it 'returns a zero' do
+ expect(subject.formatted_count).to eq('0')
+ end
+ end
+
+ describe '#highlight_map' do
+ it 'returns an empty hash' do
+ expect(subject.highlight_map).to eq({})
+ end
+ end
+
+ describe '#aggregations' do
+ it 'returns an empty array' do
+ expect(subject.aggregations).to match_array([])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 7ad1f52780a..a5d44963f4b 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -205,16 +205,6 @@ RSpec.describe Gitlab::ErrorTracking do
expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
end
end
-
- context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do
- it 'injects the normalized sql query into extra' do
- allow(exception).to receive(:cause).and_return(ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1'))
-
- track_exception
-
- expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
- end
- end
end
context 'event processors' do
diff --git a/spec/lib/gitlab/etag_caching/store_spec.rb b/spec/lib/gitlab/etag_caching/store_spec.rb
index 46195e64715..6188a3fc8b3 100644
--- a/spec/lib/gitlab/etag_caching/store_spec.rb
+++ b/spec/lib/gitlab/etag_caching/store_spec.rb
@@ -80,5 +80,19 @@ RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do
expect(store.get(key)).to eq(etag)
end
end
+
+ context 'with multiple keys' do
+ let(:keys) { ['/my-group/my-project/builds/234.json', '/api/graphql:pipelines/id/5'] }
+
+ it 'stores and returns multiple values' do
+ etags = store.touch(*keys)
+
+ expect(etags.size).to eq(keys.size)
+
+ keys.each_with_index do |key, i|
+ expect(store.get(key)).to eq(etags[i])
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/exception_log_formatter_spec.rb b/spec/lib/gitlab/exception_log_formatter_spec.rb
new file mode 100644
index 00000000000..beeeeb2b64c
--- /dev/null
+++ b/spec/lib/gitlab/exception_log_formatter_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ExceptionLogFormatter do
+ describe '.format!' do
+ let(:exception) { RuntimeError.new('bad request') }
+ let(:backtrace) { caller }
+
+ let(:payload) { {} }
+
+ before do
+ allow(exception).to receive(:backtrace).and_return(backtrace)
+ end
+
+ it 'adds exception data to log' do
+ described_class.format!(exception, payload)
+
+ expect(payload['exception.class']).to eq('RuntimeError')
+ expect(payload['exception.message']).to eq('bad request')
+ expect(payload['exception.backtrace']).to eq(Gitlab::BacktraceCleaner.clean_backtrace(backtrace))
+ expect(payload['exception.sql']).to be_nil
+ end
+
+ context 'when exception is ActiveRecord::StatementInvalid' do
+ let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
+
+ it 'adds the normalized SQL query to payload' do
+ described_class.format!(exception, payload)
+
+ expect(payload['exception.sql']).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ end
+ end
+
+ context 'when the ActiveRecord::StatementInvalid is wrapped in another exception' do
+ before do
+ allow(exception).to receive(:cause).and_return(ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1'))
+ end
+
+ it 'adds the normalized SQL query to payload' do
+ described_class.format!(exception, payload)
+
+ expect(payload['exception.sql']).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ end
+ end
+
+ context 'when the ActiveRecord::StatementInvalid is a bad query' do
+ let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT SELECT FROM SELECT') }
+
+ it 'adds the query as-is to payload' do
+ described_class.format!(exception, payload)
+
+ expect(payload['exception.sql']).to eq('SELECT SELECT FROM SELECT')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 1f7b7b90467..8a96771eeb8 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
describe '#push_frontend_experiment' do
it 'pushes an experiment to the frontend' do
- gon = instance_double('gon')
+ gon = class_double('Gon')
stub_experiment_for_subject(my_experiment: true)
allow(controller).to receive(:gon).and_return(gon)
diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb
index d52ab3a8983..d9bf85460b3 100644
--- a/spec/lib/gitlab/experimentation/experiment_spec.rb
+++ b/spec/lib/gitlab/experimentation/experiment_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Gitlab::Experimentation::Experiment do
before do
skip_feature_flags_yaml_validation
skip_default_enabled_yaml_check
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
feature = double('FeatureFlag', percentage_of_time_value: percentage, enabled?: true)
allow(Feature).to receive(:get).with(:experiment_key_experiment_percentage).and_return(feature)
end
diff --git a/spec/lib/gitlab/git/diff_stats_collection_spec.rb b/spec/lib/gitlab/git/diff_stats_collection_spec.rb
index f2fe03829be..0876a88a2ee 100644
--- a/spec/lib/gitlab/git/diff_stats_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_stats_collection_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::Git::DiffStatsCollection do
end
it 'returns capped number when it is bigger than max_files' do
- allow(::Commit).to receive(:max_diff_options).and_return(max_files: 1)
+ allow(::Commit).to receive(:diff_max_files).and_return(1)
expect(collection.real_size).to eq('1+')
end
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 16cea1dc1a3..b2603e099e6 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
describe '#running_puma_with_multiple_threads?' do
context 'when using Puma' do
before do
- stub_const('::Puma', class_double('Puma'))
+ stub_const('::Puma', double('puma constant'))
allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 005f8ecaa3a..97ba177da71 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
end
describe '#dir?' do
- let(:dir) { entries.select(&:dir?).first }
+ let(:dir) { entries.find(&:dir?) }
it { expect(dir).to be_kind_of Gitlab::Git::Tree }
it { expect(dir.id).to eq('3c122d2b7830eca25235131070602575cf8b41a1') }
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
end
describe '#file?' do
- let(:file) { entries.select(&:file?).first }
+ let(:file) { entries.find(&:file?) }
it { expect(file).to be_kind_of Gitlab::Git::Tree }
it { expect(file.id).to eq('dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82') }
@@ -143,21 +143,21 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
end
describe '#readme?' do
- let(:file) { entries.select(&:readme?).first }
+ let(:file) { entries.find(&:readme?) }
it { expect(file).to be_kind_of Gitlab::Git::Tree }
it { expect(file.name).to eq('README.md') }
end
describe '#contributing?' do
- let(:file) { entries.select(&:contributing?).first }
+ let(:file) { entries.find(&:contributing?) }
it { expect(file).to be_kind_of Gitlab::Git::Tree }
it { expect(file.name).to eq('CONTRIBUTING.md') }
end
describe '#submodule?' do
- let(:submodule) { entries.select(&:submodule?).first }
+ let(:submodule) { entries.find(&:submodule?) }
it { expect(submodule).to be_kind_of Gitlab::Git::Tree }
it { expect(submodule.id).to eq('79bceae69cb5750d6567b223597999bfa91cb3b9') }
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index d8e397dd6f3..8d9ab5db886 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -13,10 +13,6 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
let(:client) { described_class.new(repository) }
describe '#diff_from_parent' do
- before do
- stub_feature_flags(increased_diff_limits: false)
- end
-
context 'when a commit has a parent' do
it 'sends an RPC request with the parent ID as left commit' do
request = Gitaly::CommitDiffRequest.new(
@@ -108,45 +104,6 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
end
end
- describe '#between' do
- let(:from) { 'master' }
- let(:to) { Gitlab::Git::EMPTY_TREE_ID }
-
- context 'with between_commits_via_list_commits enabled' do
- before do
- stub_feature_flags(between_commits_via_list_commits: true)
- end
-
- it 'sends an RPC request' do
- request = Gitaly::ListCommitsRequest.new(
- repository: repository_message, revisions: ["^" + from, to], reverse: true
- )
-
- expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:list_commits)
- .with(request, kind_of(Hash)).and_return([])
-
- described_class.new(repository).between(from, to)
- end
- end
-
- context 'with between_commits_via_list_commits disabled' do
- before do
- stub_feature_flags(between_commits_via_list_commits: false)
- end
-
- it 'sends an RPC request' do
- request = Gitaly::CommitsBetweenRequest.new(
- repository: repository_message, from: from, to: to
- )
-
- expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:commits_between)
- .with(request, kind_of(Hash)).and_return([])
-
- described_class.new(repository).between(from, to)
- end
- end
- end
-
describe '#diff_stats' do
let(:left_commit_id) { 'master' }
let(:right_commit_id) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
diff --git a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
index 0eecdfcb630..d0787d8b673 100644
--- a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb
@@ -43,10 +43,10 @@ RSpec.describe Gitlab::GitalyClient::ConflictFilesStitcher do
messages = [
double(files: [double(header: header_1), double(header: nil, content: content_1[0..5])]),
- double(files: [double(header: nil, content: content_1[6..-1])]),
+ double(files: [double(header: nil, content: content_1[6..])]),
double(files: [double(header: header_2)]),
double(files: [double(header: nil, content: content_2[0..5]), double(header: nil, content: content_2[6..10])]),
- double(files: [double(header: nil, content: content_2[11..-1])])
+ double(files: [double(header: nil, content: content_2[11..])])
]
conflict_files = described_class.new(messages, target_repository.gitaly_repository).to_a
diff --git a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb
index 113c47b4f2c..54c84ddc56f 100644
--- a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::GitalyClient::DiffStitcher do
msg_2.raw_patch_data = diff_2.patch[0..100]
msg_2.end_of_patch = false
- msg_3 = OpenStruct.new(raw_patch_data: diff_2.patch[101..-1], end_of_patch: true)
+ msg_3 = OpenStruct.new(raw_patch_data: diff_2.patch[101..], end_of_patch: true)
msg_4 = OpenStruct.new(diff_3.to_h.except(:patch))
msg_4.raw_patch_data = diff_3.patch
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 194dfb228ee..c4d05e92633 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -221,6 +221,50 @@ RSpec.describe Gitlab::GithubImport::Client do
expect(client.with_rate_limit { 10 }).to eq(10)
end
+
+ context 'when Faraday error received from octokit', :aggregate_failures do
+ let(:error_class) { described_class::CLIENT_CONNECTION_ERROR }
+ let(:info_params) { { 'error.class': error_class } }
+ let(:block_to_rate_limit) { -> { client.pull_request('foo/bar', 999) } }
+
+ context 'when rate_limiting_enabled is true' do
+ it 'retries on error and succeeds' do
+ allow_retry
+
+ expect(client).to receive(:requests_remaining?).twice.and_return(true)
+ expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once
+
+ expect(client.with_rate_limit(&block_to_rate_limit)).to be(true)
+ end
+
+ it 'retries and does not succeed' do
+ allow(client).to receive(:requests_remaining?).and_return(true)
+ allow(client.octokit).to receive(:pull_request).and_raise(error_class, 'execution expired')
+
+ expect { client.with_rate_limit(&block_to_rate_limit) }.to raise_error(error_class, 'execution expired')
+ end
+ end
+
+ context 'when rate_limiting_enabled is false' do
+ before do
+ allow(client).to receive(:rate_limiting_enabled?).and_return(false)
+ end
+
+ it 'retries on error and succeeds' do
+ allow_retry
+
+ expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once
+
+ expect(client.with_rate_limit(&block_to_rate_limit)).to be(true)
+ end
+
+ it 'retries and does not succeed' do
+ allow(client.octokit).to receive(:pull_request).and_raise(error_class, 'execution expired')
+
+ expect { client.with_rate_limit(&block_to_rate_limit) }.to raise_error(error_class, 'execution expired')
+ end
+ end
+ end
end
describe '#requests_remaining?' do
@@ -505,6 +549,25 @@ RSpec.describe Gitlab::GithubImport::Client do
client.search_repos_by_name('test')
end
+
+ context 'when Faraday error received from octokit', :aggregate_failures do
+ let(:error_class) { described_class::CLIENT_CONNECTION_ERROR }
+ let(:info_params) { { 'error.class': error_class } }
+
+ it 'retries on error and succeeds' do
+ allow_retry(:search_repositories)
+
+ expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once
+
+ expect(client.search_repos_by_name('test')).to be(true)
+ end
+
+ it 'retries and does not succeed' do
+ allow(client.octokit).to receive(:search_repositories).and_raise(error_class, 'execution expired')
+
+ expect { client.search_repos_by_name('test') }.to raise_error(error_class, 'execution expired')
+ end
+ end
end
describe '#search_query' do
@@ -531,4 +594,12 @@ RSpec.describe Gitlab::GithubImport::Client do
end
end
end
+
+ def allow_retry(method = :pull_request)
+ call_count = 0
+ allow(client.octokit).to receive(method) do
+ call_count += 1
+ call_count > 1 ? true : raise(described_class::CLIENT_CONNECTION_ERROR, 'execution expired')
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 0448ada6bca..a0e78186caa 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -173,9 +173,11 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
EOB
end
- it 'imports the note as diff note' do
+ before do
stub_user_finder(user.id, true)
+ end
+ it 'imports the note as diff note' do
expect { subject.execute }
.to change(DiffNote, :count)
.by(1)
@@ -212,6 +214,29 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail
```
NOTE
end
+
+ context 'when the note diff file creation fails' do
+ it 'falls back to the LegacyDiffNote' do
+ exception = ::DiffNote::NoteDiffFileCreationError.new('Failed to create diff note file')
+
+ expect_next_instance_of(::Import::Github::Notes::CreateService) do |service|
+ expect(service)
+ .to receive(:execute)
+ .and_raise(exception)
+ end
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:warn)
+ .with(
+ message: 'Failed to create diff note file',
+ 'error.class': 'DiffNote::NoteDiffFileCreationError'
+ )
+
+ expect { subject.execute }
+ .to change(LegacyDiffNote, :count)
+ .and not_change(DiffNote, :count)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
index 96d8acbd3de..165f543525d 100644
--- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
project_id: project.id,
author_id: user.id,
note: 'This is my note',
+ discussion_id: match(/\A[0-9a-f]{40}\z/),
system: false,
created_at: created_at,
updated_at: updated_at
@@ -82,6 +83,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
project_id: project.id,
author_id: project.creator_id,
note: "*Created by: alice*\n\nThis is my note",
+ discussion_id: match(/\A[0-9a-f]{40}\z/),
system: false,
created_at: created_at,
updated_at: updated_at
diff --git a/spec/lib/gitlab/github_import/parallel_importer_spec.rb b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
index c7b300ff043..d418e87284d 100644
--- a/spec/lib/gitlab/github_import/parallel_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
@@ -27,8 +27,13 @@ RSpec.describe Gitlab::GithubImport::ParallelImporter do
before do
create(:import_state, :started, project: project)
+ worker = double(:worker)
expect(Gitlab::GithubImport::Stage::ImportRepositoryWorker)
+ .to receive(:with_status)
+ .and_return(worker)
+
+ expect(worker)
.to receive(:perform_async)
.with(project.id)
.and_return('123')
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index 3d3f381b6d2..b8ed4cf608d 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::GonHelper do
end
it 'pushes a feature flag to the frontend' do
- gon = instance_double('gon')
+ gon = class_double('Gon')
thing = stub_feature_flag_gate('thing')
stub_feature_flags(my_feature_flag: thing)
diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
index 771f6e1ec46..5d444775e53 100644
--- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
+++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
@@ -192,7 +192,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
project: project,
commit_sha: commit_sha,
gpg_key: nil,
- gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..-1],
+ gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..],
verification_status: 'unknown_key'
end
diff --git a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
index 3ce09740ec8..968d938a911 100644
--- a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do
- let(:mock_request) { OpenStruct.new(env: {}) }
+ let(:mock_request) { double('env', env: {}) }
let(:response_body) { nil }
describe ".parameters" do
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do
describe 'when an exception is available' do
let(:exception) { RuntimeError.new('This is a test') }
let(:mock_request) do
- OpenStruct.new(
+ double('env',
env: {
::API::Helpers::API_EXCEPTION_ENV => exception
}
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
index 0047d24a215..0741088c915 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:arguments) { {} }
let(:query_type) { GraphQL::ObjectType.new }
let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
+ let(:context) { GraphQL::Query::Context.new(query: double('query', schema: schema), values: nil, object: nil) }
let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) }
let_it_be(:column_order_id_desc) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) }
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
it 'returns the correct nodes' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
end
end
end
@@ -107,7 +107,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:arguments) { { after: encoded_cursor(projects[1]) } }
it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
end
context 'when the sort order is descending' do
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 8ef5f1147c5..b511a294f97 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:arguments) { {} }
let(:query_type) { GraphQL::ObjectType.new }
let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
+ let(:context) { GraphQL::Query::Context.new(query: double('query', schema: schema), values: nil, object: nil) }
subject(:connection) do
described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
@@ -120,7 +120,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.all.order(id: :desc) }
it 'returns the correct nodes' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
end
end
end
@@ -129,7 +129,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:arguments) { { after: encoded_cursor(projects[1]) } }
it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
end
context 'when the sort order is descending' do
diff --git a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
index d83ac4dabc5..5bc077a963e 100644
--- a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
+++ b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
@@ -1,6 +1,5 @@
# frozen_string_literal: true
-require "fast_spec_helper"
-require "support/graphql/fake_query_type"
+require "spec_helper"
RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do
let(:dummy_schema) do
@@ -49,4 +48,15 @@ RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do
dummy_schema.execute(query_string, variables: variables)
end
+
+ it 'logs exceptions for breaking queries' do
+ query_string = "query fooOperation { breakingField }"
+
+ expect(::Gitlab::GraphqlLogger).to receive(:info).with(a_hash_including({
+ 'exception.message' => 'This field is supposed to break',
+ 'exception.class' => 'RuntimeError'
+ }))
+
+ expect { dummy_schema.execute(query_string) }.to raise_error(/This field is supposed to break/)
+ end
end
diff --git a/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb
index ff6a76aa319..168f5aa529e 100644
--- a/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb
+++ b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require 'rspec-parameterized'
require "support/graphql/fake_query_type"
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::Graphql::Tracers::MetricsTracer do
end
with_them do
- it 'increments sli' do
+ it 'increments apdex sli' do
# Trigger initialization
fake_schema
@@ -56,5 +56,13 @@ RSpec.describe Gitlab::Graphql::Tracers::MetricsTracer do
fake_schema.execute("query lorem { helloWorld }")
end
end
+
+ it "does not record apdex for failing queries" do
+ query_string = "query fooOperation { breakingField }"
+
+ expect(Gitlab::Metrics::RailsSlis.graphql_query_apdex).not_to receive(:increment)
+
+ expect { fake_schema.execute(query_string) }.to raise_error(/This field is supposed to break/)
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb
index 7f837e28772..986120dcd95 100644
--- a/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb
+++ b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Gitlab::Graphql::Tracers::TimerTracer do
before do
current_time = 0
+ allow(tracer_spy).to receive(:trace)
allow(Gitlab::Metrics::System).to receive(:monotonic_time) do
current_time += expected_duration
end
@@ -30,6 +31,18 @@ RSpec.describe Gitlab::Graphql::Tracers::TimerTracer do
dummy_schema.execute(query_string)
+ expect_to_have_traced(tracer_spy, expected_duration, query_string)
+ end
+
+ it "adds a duration_s even if the query failed" do
+ query_string = "query fooOperation { breakingField }"
+
+ expect { dummy_schema.execute(query_string) }.to raise_error(/This field is supposed to break/)
+
+ expect_to_have_traced(tracer_spy, expected_duration, query_string)
+ end
+
+ def expect_to_have_traced(tracer_spy, expected_duration, query_string)
# "parse" and "execute_query" are just arbitrary trace events
expect(tracer_spy).to have_received(:trace).with("parse", {
duration_s: expected_duration,
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
index 9e6ad35861f..ddd681f75f0 100644
--- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
assignee_id
assignee_ids
author_id
+ blocking_discussions_resolved
created_at
description
head_pipeline_id
diff --git a/spec/lib/gitlab/import/import_failure_service_spec.rb b/spec/lib/gitlab/import/import_failure_service_spec.rb
index c16d4a7c804..e3fec63adde 100644
--- a/spec/lib/gitlab/import/import_failure_service_spec.rb
+++ b/spec/lib/gitlab/import/import_failure_service_spec.rb
@@ -7,58 +7,48 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
let_it_be(:project) { create(:project, :import_started, import_type: import_type) }
let(:exception) { StandardError.new('some error') }
- let(:arguments) { { project_id: project.id } }
- let(:base_arguments) { { error_source: 'SomeImporter', exception: exception }.merge(arguments) }
- let(:exe_arguments) { { fail_import: false, metrics: false } }
+ let(:import_state) { nil }
+ let(:fail_import) { false }
+ let(:metrics) { false }
+
+ let(:arguments) do
+ {
+ project_id: project.id,
+ error_source: 'SomeImporter',
+ exception: exception,
+ fail_import: fail_import,
+ metrics: metrics,
+ import_state: import_state
+ }
+ end
describe '.track' do
+ let(:instance) { double(:failure_service) }
+
context 'with all arguments provided' do
- let(:instance) { double(:failure_service) }
- let(:instance_arguments) do
+ let(:arguments) do
{
exception: exception,
import_state: '_import_state_',
project_id: '_project_id_',
- error_source: '_error_source_'
- }
- end
-
- let(:exe_arguments) do
- {
+ error_source: '_error_source_',
fail_import: '_fail_import_',
metrics: '_metrics_'
}
end
it 'invokes a new instance and executes' do
- expect(described_class).to receive(:new).with(**instance_arguments).and_return(instance)
- expect(instance).to receive(:execute).with(**exe_arguments)
+ expect(described_class).to receive(:new).with(**arguments).and_return(instance)
+ expect(instance).to receive(:execute)
- described_class.track(**instance_arguments.merge(exe_arguments))
+ described_class.track(**arguments)
end
end
context 'with only necessary arguments utilizing defaults' do
- let(:instance) { double(:failure_service) }
- let(:instance_arguments) do
- {
- exception: exception,
- import_state: nil,
- project_id: nil,
- error_source: nil
- }
- end
-
- let(:exe_arguments) do
- {
- fail_import: false,
- metrics: false
- }
- end
-
it 'invokes a new instance and executes' do
- expect(described_class).to receive(:new).with(**instance_arguments).and_return(instance)
- expect(instance).to receive(:execute).with(**exe_arguments)
+ expect(described_class).to receive(:new).with(a_hash_including(exception: exception)).and_return(instance)
+ expect(instance).to receive(:execute)
described_class.track(exception: exception)
end
@@ -66,7 +56,7 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
end
describe '#execute' do
- subject(:service) { described_class.new(**base_arguments) }
+ subject(:service) { described_class.new(**arguments) }
shared_examples 'logs the exception and fails the import' do
it 'when the failure does not abort the import' do
@@ -89,13 +79,14 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
source: 'SomeImporter'
)
- service.execute(**exe_arguments)
+ service.execute
expect(project.import_state.reload.status).to eq('failed')
expect(project.import_failures).not_to be_empty
expect(project.import_failures.last.exception_class).to eq('StandardError')
expect(project.import_failures.last.exception_message).to eq('some error')
+ expect(project.import_failures.last.retry_count).to eq(0)
end
end
@@ -120,32 +111,36 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
source: 'SomeImporter'
)
- service.execute(**exe_arguments)
+ service.execute
expect(project.import_state.reload.status).to eq('started')
expect(project.import_failures).not_to be_empty
expect(project.import_failures.last.exception_class).to eq('StandardError')
expect(project.import_failures.last.exception_message).to eq('some error')
+ expect(project.import_failures.last.retry_count).to eq(nil)
end
end
context 'when tracking metrics' do
- let(:exe_arguments) { { fail_import: false, metrics: true } }
+ let(:metrics) { true }
it 'tracks the failed import' do
- metrics = double(:metrics)
+ metrics_double = double(:metrics)
- expect(Gitlab::Import::Metrics).to receive(:new).with("#{project.import_type}_importer", project).and_return(metrics)
- expect(metrics).to receive(:track_failed_import)
+ expect(Gitlab::Import::Metrics)
+ .to receive(:new)
+ .with("#{project.import_type}_importer", project)
+ .and_return(metrics_double)
+ expect(metrics_double).to receive(:track_failed_import)
- service.execute(**exe_arguments)
+ service.execute
end
end
context 'when using the project as reference' do
context 'when it fails the import' do
- let(:exe_arguments) { { fail_import: true, metrics: false } }
+ let(:fail_import) { true }
it_behaves_like 'logs the exception and fails the import'
end
@@ -156,10 +151,10 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do
end
context 'when using the import_state as reference' do
- let(:arguments) { { import_state: project.import_state } }
+ let(:import_state) { project.import_state }
context 'when it fails the import' do
- let(:exe_arguments) { { fail_import: true, metrics: false } }
+ let(:fail_import) { true }
it_behaves_like 'logs the exception and fails the import'
end
diff --git a/spec/lib/gitlab/import/set_async_jid_spec.rb b/spec/lib/gitlab/import/set_async_jid_spec.rb
index 6931a7a953d..016f7cac61a 100644
--- a/spec/lib/gitlab/import/set_async_jid_spec.rb
+++ b/spec/lib/gitlab/import/set_async_jid_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Import::SetAsyncJid do
it 'sets the JID in Redis' do
expect(Gitlab::SidekiqStatus)
.to receive(:set)
- .with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
+ .with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION, value: 2)
.and_call_original
described_class.set_jid(project.import_state)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index b474f5825fd..7ed80cbcf66 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -61,6 +61,7 @@ issues:
- pending_escalations
- customer_relations_contacts
- issue_customer_relations_contacts
+- email
work_item_type:
- issues
events:
@@ -197,6 +198,7 @@ merge_requests:
- system_note_metadata
- note_authors
- cleanup_schedule
+- compliance_violations
external_pull_requests:
- project
merge_request_diff:
@@ -223,6 +225,7 @@ ci_pipelines:
- ci_ref
- stages
- statuses
+- statuses_order_id_desc
- latest_statuses_ordered_by_stage
- builds
- bridges
@@ -596,6 +599,8 @@ project:
- security_scans
- ci_feature_usages
- bulk_import_exports
+- ci_project_mirror
+- sync_events
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
index 8ae387d95e3..c748f966463 100644
--- a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
+++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
@@ -140,6 +140,7 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do
:zoom_meetings | true
:issues | true
:group_members | true
+ :project | true
end
with_them do
@@ -150,7 +151,11 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do
describe 'included_attributes for Project' do
subject { described_class.new }
- additional_attributes = { user: %w[id] }
+      # attributes for which a special exception is made, or which are only available via included modules rather than attribute introspection
+ additional_attributes = {
+ user: %w[id],
+ project: %w[auto_devops_deploy_strategy auto_devops_enabled issues_enabled jobs_enabled merge_requests_enabled snippets_enabled wiki_enabled build_git_strategy build_enabled security_and_compliance_enabled requirements_enabled]
+ }
Gitlab::ImportExport::Config.new.to_h[:included_attributes].each do |relation_sym, permitted_attributes|
context "for #{relation_sym}" do
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 6bb6be07749..1d8b137c196 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
def match_mr1_note(content_regex)
- MergeRequest.find_by(title: 'MR1').notes.select { |n| n.note.match(/#{content_regex}/)}.first
+ MergeRequest.find_by(title: 'MR1').notes.find { |n| n.note.match(/#{content_regex}/) }
end
RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
context 'for an Issue' do
it 'does not import note_html' do
note_content = 'Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi'
- issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.select { |n| n.note.match(/#{note_content}/)}.first
+ issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.find { |n| n.note.match(/#{note_content}/) }
expect(issue_note.note_html).to match(/#{note_content}/)
end
@@ -552,7 +552,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
it 'restores issue system note metadata successfully' do
note_content = 'created merge request !1 to address this issue'
- note = project.issues.first.notes.select { |n| n.note.match(/#{note_content}/)}.first
+ note = project.issues.first.notes.find { |n| n.note.match(/#{note_content}/)}
expect(note.noteable_type).to eq('Issue')
expect(note.system).to eq(true)
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 9daa3b32fd1..6ffe2187466 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -33,6 +33,7 @@ Issue:
- health_status
- external_key
- issue_type
+- email_message_id
Event:
- id
- target_type
@@ -562,6 +563,7 @@ Project:
- autoclose_referenced_issues
- suggestion_commit_message
- merge_commit_template
+- squash_commit_template
ProjectTracingSetting:
- external_url
Author:
diff --git a/spec/lib/gitlab/lets_encrypt/client_spec.rb b/spec/lib/gitlab/lets_encrypt/client_spec.rb
index 54b9bd3bfba..f1284318687 100644
--- a/spec/lib/gitlab/lets_encrypt/client_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/client_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe ::Gitlab::LetsEncrypt::Client do
subject(:new_order) { client.new_order('example.com') }
before do
- order_double = instance_double('Acme::Order')
+ order_double = double('Acme::Order')
allow(stub_client).to receive(:new_order).and_return(order_double)
end
@@ -107,7 +107,7 @@ RSpec.describe ::Gitlab::LetsEncrypt::Client do
subject { client.load_challenge(url) }
before do
- acme_challenge = instance_double('Acme::Client::Resources::Challenge')
+ acme_challenge = double('Acme::Client::Resources::Challenge')
allow(stub_client).to receive(:challenge).with(url: url).and_return(acme_challenge)
end
diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb
index a4ae39a835a..d8f351bb8a3 100644
--- a/spec/lib/gitlab/lograge/custom_options_spec.rb
+++ b/spec/lib/gitlab/lograge/custom_options_spec.rb
@@ -95,5 +95,55 @@ RSpec.describe Gitlab::Lograge::CustomOptions do
expect(subject[correlation_id_key]).to eq('123456')
end
end
+
+ context 'when feature flags are present', :request_store do
+ before do
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+
+ definitions = {}
+ [:enabled_feature, :disabled_feature].each do |flag_name|
+ definitions[flag_name] = Feature::Definition.new("development/enabled_feature.yml",
+ name: flag_name,
+ type: 'development',
+ log_state_changes: true,
+ default_enabled: false)
+
+ allow(Feature).to receive(:log_feature_flag_states?).with(flag_name).and_call_original
+ end
+
+ allow(Feature::Definition).to receive(:definitions).and_return(definitions)
+
+ Feature.enable(:enabled_feature)
+ Feature.disable(:disabled_feature)
+ end
+
+ context 'and :feature_flag_log_states is enabled' do
+ before do
+ Feature.enable(:feature_flag_state_logs)
+ end
+
+ it 'adds feature flag events' do
+ Feature.enabled?(:enabled_feature)
+ Feature.enabled?(:disabled_feature)
+
+ expect(subject).to have_key(:feature_flag_states)
+ expect(subject[:feature_flag_states]).to match_array(%w[enabled_feature:1 disabled_feature:0])
+ end
+ end
+
+ context 'and :feature_flag_log_states is disabled' do
+ before do
+ Feature.disable(:feature_flag_state_logs)
+ end
+
+ it 'does not track or add feature flag events' do
+ Feature.enabled?(:enabled_feature)
+ Feature.enabled?(:disabled_feature)
+
+ expect(subject).not_to have_key(:feature_flag_states)
+ expect(Feature).not_to receive(:log_feature_flag_state)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
new file mode 100644
index 00000000000..65c76aac10c
--- /dev/null
+++ b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
@@ -0,0 +1,290 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
+ let(:merge_commit_template) { nil }
+ let(:squash_commit_template) { nil }
+ let(:project) do
+ create(
+ :project,
+ :public,
+ :repository,
+ merge_commit_template: merge_commit_template,
+ squash_commit_template: squash_commit_template
+ )
+ end
+
+ let(:user) { project.creator }
+ let(:source_branch) { 'feature' }
+ let(:merge_request_description) { "Merge Request Description\nNext line" }
+ let(:merge_request_title) { 'Bugfix' }
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :simple,
+ source_project: project,
+ target_project: project,
+ target_branch: 'master',
+ source_branch: source_branch,
+ author: user,
+ description: merge_request_description,
+ title: merge_request_title
+ )
+ end
+
+ subject { described_class.new(merge_request: merge_request) }
+
+ shared_examples_for 'commit message with template' do |message_template_name|
+ it 'returns nil when template is not set in target project' do
+ expect(result_message).to be_nil
+ end
+
+ context 'when project has custom commit template' do
+ let(message_template_name) { <<~MSG.rstrip }
+ %{title}
+
+ See merge request %{reference}
+ MSG
+
+ it 'uses custom template' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+ end
+
+ context 'when project has commit template with closed issues' do
+ let(message_template_name) { <<~MSG.rstrip }
+ Merge branch '%{source_branch}' into '%{target_branch}'
+
+ %{title}
+
+ %{issues}
+
+ See merge request %{reference}
+ MSG
+
+ it 'omits issues and new lines when no issues are mentioned in description' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+
+ context 'when MR closes issues' do
+ let(:issue_1) { create(:issue, project: project) }
+ let(:issue_2) { create(:issue, project: project) }
+ let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" }
+
+ it 'includes them and keeps new line characters' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ Closes #{issue_1.to_reference} and #{issue_2.to_reference}
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+ end
+ end
+
+ context 'when project has commit template with description' do
+ let(message_template_name) { <<~MSG.rstrip }
+ Merge branch '%{source_branch}' into '%{target_branch}'
+
+ %{title}
+
+ %{description}
+
+ See merge request %{reference}
+ MSG
+
+ it 'uses template' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ Merge Request Description
+ Next line
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+
+ context 'when description is empty string' do
+ let(:merge_request_description) { '' }
+
+ it 'skips description placeholder and removes new line characters before it' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+ end
+
+ context 'when description is nil' do
+ let(:merge_request_description) { nil }
+
+ it 'skips description placeholder and removes new line characters before it' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+ end
+
+ context 'when description is blank string' do
+ let(:merge_request_description) { "\n\r \n" }
+
+ it 'skips description placeholder and removes new line characters before it' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+ end
+ end
+
+ context 'when custom commit template contains placeholder in the middle or beginning of the line' do
+ let(message_template_name) { <<~MSG.rstrip }
+ Merge branch '%{source_branch}' into '%{target_branch}'
+
+ %{description} %{title}
+
+ See merge request %{reference}
+ MSG
+
+ it 'uses custom template' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Merge Request Description
+ Next line Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+
+ context 'when description is empty string' do
+ let(:merge_request_description) { '' }
+
+ it 'does not remove new line characters before empty placeholder' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+ end
+ end
+
+ context 'when project has template with CRLF newlines' do
+ let(message_template_name) do
+ "Merge branch '%{source_branch}' into '%{target_branch}'\r\n\r\n%{title}\r\n\r\n%{description}\r\n\r\nSee merge request %{reference}"
+ end
+
+ it 'converts it to LF newlines' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ Merge Request Description
+ Next line
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+
+ context 'when description is empty string' do
+ let(:merge_request_description) { '' }
+
+ it 'skips description placeholder and removes new line characters before it' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Bugfix
+
+ See merge request #{merge_request.to_reference(full: true)}
+ MSG
+ end
+ end
+
+ context 'when project has merge commit template with first_commit' do
+ let(message_template_name) { <<~MSG.rstrip }
+ Message: %{first_commit}
+ MSG
+
+ it 'uses first commit' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Message: Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ MSG
+ end
+
+ context 'when branch has no unmerged commits' do
+ let(:source_branch) { 'v1.1.0' }
+
+ it 'is an empty string' do
+ expect(result_message).to eq 'Message: '
+ end
+ end
+ end
+
+ context 'when project has merge commit template with first_multiline_commit' do
+ let(message_template_name) { <<~MSG.rstrip }
+ Message: %{first_multiline_commit}
+ MSG
+
+ it 'uses first multiline commit' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Message: Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ MSG
+ end
+
+ context 'when branch has no multiline commits' do
+ let(:source_branch) { 'spooky-stuff' }
+
+ it 'is mr title' do
+ expect(result_message).to eq 'Message: Bugfix'
+ end
+ end
+ end
+ end
+ end
+
+ describe '#merge_message' do
+ let(:result_message) { subject.merge_message }
+
+ it_behaves_like 'commit message with template', :merge_commit_template
+ end
+
+ describe '#squash_message' do
+ let(:result_message) { subject.squash_message }
+
+ it_behaves_like 'commit message with template', :squash_commit_template
+ end
+end
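The new generator spec runs one shared example group against both merge_commit_template and squash_commit_template by passing the attribute name into shared_examples_for and using it as the name of a dynamically defined let. A stripped-down sketch of that pattern with hypothetical names (TemplateHolder, greeting_template, farewell_template):

require 'rspec/autorun'

TemplateHolder = Struct.new(:greeting_template, :farewell_template)

RSpec.shared_examples 'renders a template' do |template_name|
  # The let name itself is the parameter, mirroring let(message_template_name) above.
  let(template_name) { 'hello %{name}' }

  it 'interpolates the configured template' do
    holder = TemplateHolder.new
    holder[template_name] = public_send(template_name)

    expect(format(holder[template_name], name: 'world')).to eq('hello world')
  end
end

RSpec.describe TemplateHolder do
  it_behaves_like 'renders a template', :greeting_template
  it_behaves_like 'renders a template', :farewell_template
end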
diff --git a/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb b/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb
deleted file mode 100644
index 884f8df5e56..00000000000
--- a/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb
+++ /dev/null
@@ -1,219 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::MergeRequests::MergeCommitMessage do
- let(:merge_commit_template) { nil }
- let(:project) { create(:project, :public, :repository, merge_commit_template: merge_commit_template) }
- let(:user) { project.creator }
- let(:merge_request_description) { "Merge Request Description\nNext line" }
- let(:merge_request_title) { 'Bugfix' }
- let(:merge_request) do
- create(
- :merge_request,
- :simple,
- source_project: project,
- target_project: project,
- author: user,
- description: merge_request_description,
- title: merge_request_title
- )
- end
-
- subject { described_class.new(merge_request: merge_request) }
-
- it 'returns nil when template is not set in target project' do
- expect(subject.message).to be_nil
- end
-
- context 'when project has custom merge commit template' do
- let(:merge_commit_template) { <<~MSG.rstrip }
- %{title}
-
- See merge request %{reference}
- MSG
-
- it 'uses custom template' do
- expect(subject.message).to eq <<~MSG.rstrip
- Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
- end
-
- context 'when project has merge commit template with closed issues' do
- let(:merge_commit_template) { <<~MSG.rstrip }
- Merge branch '%{source_branch}' into '%{target_branch}'
-
- %{title}
-
- %{issues}
-
- See merge request %{reference}
- MSG
-
- it 'omits issues and new lines when no issues are mentioned in description' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
-
- context 'when MR closes issues' do
- let(:issue_1) { create(:issue, project: project) }
- let(:issue_2) { create(:issue, project: project) }
- let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" }
-
- it 'includes them and keeps new line characters' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- Closes #{issue_1.to_reference} and #{issue_2.to_reference}
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
- end
- end
-
- context 'when project has merge commit template with description' do
- let(:merge_commit_template) { <<~MSG.rstrip }
- Merge branch '%{source_branch}' into '%{target_branch}'
-
- %{title}
-
- %{description}
-
- See merge request %{reference}
- MSG
-
- it 'uses template' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- Merge Request Description
- Next line
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
-
- context 'when description is empty string' do
- let(:merge_request_description) { '' }
-
- it 'skips description placeholder and removes new line characters before it' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
- end
-
- context 'when description is nil' do
- let(:merge_request_description) { nil }
-
- it 'skips description placeholder and removes new line characters before it' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
- end
-
- context 'when description is blank string' do
- let(:merge_request_description) { "\n\r \n" }
-
- it 'skips description placeholder and removes new line characters before it' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
- end
- end
-
- context 'when custom merge commit template contains placeholder in the middle or beginning of the line' do
- let(:merge_commit_template) { <<~MSG.rstrip }
- Merge branch '%{source_branch}' into '%{target_branch}'
-
- %{description} %{title}
-
- See merge request %{reference}
- MSG
-
- it 'uses custom template' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Merge Request Description
- Next line Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
-
- context 'when description is empty string' do
- let(:merge_request_description) { '' }
-
- it 'does not remove new line characters before empty placeholder' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
- end
- end
-
- context 'when project has template with CRLF newlines' do
- let(:merge_commit_template) do
- "Merge branch '%{source_branch}' into '%{target_branch}'\r\n\r\n%{title}\r\n\r\n%{description}\r\n\r\nSee merge request %{reference}"
- end
-
- it 'converts it to LF newlines' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- Merge Request Description
- Next line
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
-
- context 'when description is empty string' do
- let(:merge_request_description) { '' }
-
- it 'skips description placeholder and removes new line characters before it' do
- expect(subject.message).to eq <<~MSG.rstrip
- Merge branch 'feature' into 'master'
-
- Bugfix
-
- See merge request #{merge_request.to_reference(full: true)}
- MSG
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
index e5475d04d86..2471faf76b2 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitlab_redis_cache do
subject(:redis_interface) { described_class.new }
let(:merge_check) { double(cache_key: '13') }
@@ -11,17 +11,17 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitla
describe '#save_check' do
it 'saves the hash' do
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get(expected_key) }).to be_nil
+ expect(Gitlab::Redis::Cache.with { |redis| redis.get(expected_key) }).to be_nil
redis_interface.save_check(merge_check: merge_check, result_hash: result_hash)
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get(expected_key) }).to eq result_hash.to_json
+ expect(Gitlab::Redis::Cache.with { |redis| redis.get(expected_key) }).to eq result_hash.to_json
end
end
describe '#retrieve_check' do
it 'returns the hash' do
- Gitlab::Redis::SharedState.with { |redis| redis.set(expected_key, result_hash.to_json) }
+ Gitlab::Redis::Cache.with { |redis| redis.set(expected_key, result_hash.to_json) }
expect(redis_interface.retrieve_check(merge_check: merge_check)).to eq result_hash
end
diff --git a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
index e4f85243528..9cd1ef4094e 100644
--- a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
- let(:exporter) { described_class.new }
- let(:log_filename) { File.join(Rails.root, 'log', 'sidekiq_exporter.log') }
let(:settings) { double('settings') }
+ let(:exporter) { described_class.new(settings) }
+ let(:log_filename) { File.join(Rails.root, 'log', 'sidekiq_exporter.log') }
before do
allow_any_instance_of(described_class).to receive(:log_filename).and_return(log_filename)
diff --git a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
index 01cf47a7c58..75bc3ba9626 100644
--- a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Metrics::Exporter::SidekiqExporter do
- let(:exporter) { described_class.new }
+ let(:exporter) { described_class.new(Settings.monitoring.sidekiq_exporter) }
after do
exporter.stop
@@ -50,40 +50,4 @@ RSpec.describe Gitlab::Metrics::Exporter::SidekiqExporter do
expect(exporter.log_filename).to end_with('sidekiq_exporter.log')
end
end
-
- context 'when port is already taken' do
- let(:first_exporter) { described_class.new }
-
- before do
- stub_config(
- monitoring: {
- sidekiq_exporter: {
- enabled: true,
- port: 9992,
- address: '127.0.0.1'
- }
- }
- )
-
- first_exporter.start
- end
-
- after do
- first_exporter.stop
- end
-
- it 'does print error message' do
- expect(Sidekiq.logger).to receive(:error)
- .with(
- class: described_class.to_s,
- message: 'Cannot start sidekiq_exporter',
- 'exception.message' => anything)
-
- exporter.start
- end
-
- it 'does not start thread' do
- expect(exporter.start).to be_nil
- end
- end
end
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index e97a4fdddcb..e8f8947c9e8 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -8,44 +8,169 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
it_behaves_like 'metrics sampler', 'DATABASE_SAMPLER'
describe '#sample' do
+ let(:main_labels) do
+ {
+ class: 'ActiveRecord::Base',
+ host: ApplicationRecord.database.config['host'],
+ port: ApplicationRecord.database.config['port'],
+ db_config_name: 'main'
+ }
+ end
+
+ let(:ci_labels) do
+ {
+ class: 'Ci::ApplicationRecord',
+ host: Ci::ApplicationRecord.database.config['host'],
+ port: Ci::ApplicationRecord.database.config['port'],
+ db_config_name: 'ci'
+ }
+ end
+
+ let(:main_replica_labels) do
+ {
+ class: 'ActiveRecord::Base',
+ host: 'main-replica-host',
+ port: 2345,
+ db_config_name: 'main_replica'
+ }
+ end
+
+ let(:ci_replica_labels) do
+ {
+ class: 'Ci::ApplicationRecord',
+ host: 'ci-replica-host',
+ port: 3456,
+ db_config_name: 'ci_replica'
+ }
+ end
+
before do
described_class::METRIC_DESCRIPTIONS.each_key do |metric|
allow(subject.metrics[metric]).to receive(:set)
end
+
+ allow(Gitlab::Database).to receive(:database_base_models)
+ .and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord })
end
- context 'for ActiveRecord::Base' do
- let(:labels) do
- {
- class: 'ActiveRecord::Base',
- host: ApplicationRecord.database.config['host'],
- port: ApplicationRecord.database.config['port']
- }
+ context 'when all base models are connected', :add_ci_connection do
+ it 'samples connection pool statistics for all primaries' do
+ expect_metrics_with_labels(main_labels)
+ expect_metrics_with_labels(ci_labels)
+
+ subject.sample
end
- context 'when the database is connected' do
- it 'samples connection pool statistics' do
- expect(subject.metrics[:size]).to receive(:set).with(labels, a_value >= 1)
- expect(subject.metrics[:connections]).to receive(:set).with(labels, a_value >= 1)
- expect(subject.metrics[:busy]).to receive(:set).with(labels, a_value >= 1)
- expect(subject.metrics[:dead]).to receive(:set).with(labels, a_value >= 0)
- expect(subject.metrics[:waiting]).to receive(:set).with(labels, a_value >= 0)
+ context 'when replica hosts are configured' do
+ let(:main_load_balancer) { ActiveRecord::Base.load_balancer } # rubocop:disable Database/MultipleDatabases
+ let(:main_replica_host) { main_load_balancer.host }
+
+ let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
+ let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') }
+ let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
+ let(:ci_replica_host) { double(:host, connection: ci_connection) }
+ let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
+
+ before do
+ allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer)
+ .and_return([main_load_balancer, ci_load_balancer].to_enum)
+
+ allow(main_load_balancer).to receive(:primary_only?).and_return(false)
+ allow(ci_load_balancer).to receive(:primary_only?).and_return(false)
+
+ allow(main_replica_host).to receive(:host).and_return('main-replica-host')
+ allow(ci_replica_host).to receive(:host).and_return('ci-replica-host')
+
+ allow(main_replica_host).to receive(:port).and_return(2345)
+ allow(ci_replica_host).to receive(:port).and_return(3456)
+
+ allow(Gitlab::Database).to receive(:db_config_name)
+ .with(main_replica_host.connection)
+ .and_return('main_replica')
+
+ allow(Gitlab::Database).to receive(:db_config_name)
+ .with(ci_replica_host.connection)
+ .and_return('ci_replica')
+ end
+
+ it 'samples connection pool statistics for primaries and replicas' do
+ expect_metrics_with_labels(main_labels)
+ expect_metrics_with_labels(ci_labels)
+ expect_metrics_with_labels(main_replica_labels)
+ expect_metrics_with_labels(ci_replica_labels)
subject.sample
end
end
+ end
+
+ context 'when a base model is not connected', :add_ci_connection do
+ before do
+ allow(Ci::ApplicationRecord).to receive(:connected?).and_return(false)
+ end
+
+ it 'records no samples for that primary' do
+ expect_metrics_with_labels(main_labels)
+ expect_no_metrics_with_labels(ci_labels)
+
+ subject.sample
+ end
+
+ context 'when the base model has replica connections' do
+ let(:main_load_balancer) { ActiveRecord::Base.load_balancer } # rubocop:disable Database/MultipleDatabases
+ let(:main_replica_host) { main_load_balancer.host }
+
+ let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
+ let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') }
+ let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
+ let(:ci_replica_host) { double(:host, connection: ci_connection) }
+ let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
- context 'when the database is not connected' do
before do
- allow(ActiveRecord::Base).to receive(:connected?).and_return(false)
+ allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer)
+ .and_return([main_load_balancer, ci_load_balancer].to_enum)
+
+ allow(main_load_balancer).to receive(:primary_only?).and_return(false)
+ allow(ci_load_balancer).to receive(:primary_only?).and_return(false)
+
+ allow(main_replica_host).to receive(:host).and_return('main-replica-host')
+ allow(ci_replica_host).to receive(:host).and_return('ci-replica-host')
+
+ allow(main_replica_host).to receive(:port).and_return(2345)
+ allow(ci_replica_host).to receive(:port).and_return(3456)
+
+ allow(Gitlab::Database).to receive(:db_config_name)
+ .with(main_replica_host.connection)
+ .and_return('main_replica')
+
+ allow(Gitlab::Database).to receive(:db_config_name)
+ .with(ci_replica_host.connection)
+ .and_return('ci_replica')
end
- it 'records no samples' do
- expect(subject.metrics[:size]).not_to receive(:set).with(labels, anything)
+ it 'still records the replica metrics' do
+ expect_metrics_with_labels(main_labels)
+ expect_metrics_with_labels(main_replica_labels)
+ expect_no_metrics_with_labels(ci_labels)
+ expect_metrics_with_labels(ci_replica_labels)
subject.sample
end
end
end
+
+ def expect_metrics_with_labels(labels)
+ expect(subject.metrics[:size]).to receive(:set).with(labels, a_value >= 1)
+ expect(subject.metrics[:connections]).to receive(:set).with(labels, a_value >= 1)
+ expect(subject.metrics[:busy]).to receive(:set).with(labels, a_value >= 1)
+ expect(subject.metrics[:dead]).to receive(:set).with(labels, a_value >= 0)
+ expect(subject.metrics[:waiting]).to receive(:set).with(labels, a_value >= 0)
+ end
+
+ def expect_no_metrics_with_labels(labels)
+ described_class::METRIC_DESCRIPTIONS.each_key do |metric|
+ expect(subject.metrics[metric]).not_to receive(:set).with(labels, anything)
+ end
+ end
end
end
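expect_metrics_with_labels and expect_no_metrics_with_labels above are plain methods defined inside the example group; RSpec makes such methods available to every example, which keeps the per-label expectations in one place. A small sketch of the same extraction (the gauge double and its labels are invented):

require 'rspec/autorun'

RSpec.describe 'a labelled gauge' do
  let(:gauge) { double(:gauge) }

  it 'is set once per label set' do
    expect_gauge_set(db_config_name: 'main')
    expect_gauge_set(db_config_name: 'ci')

    gauge.set({ db_config_name: 'main' }, 1)
    gauge.set({ db_config_name: 'ci' }, 2)
  end

  # Shared expectation helper, analogous to expect_metrics_with_labels above.
  def expect_gauge_set(labels)
    expect(gauge).to receive(:set).with(labels, a_value >= 1)
  end
end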
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index a8e4f039da4..389b0ef1044 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -198,6 +198,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
context 'query using a connection to a replica' do
before do
allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).and_return(:replica)
+ allow(connection).to receive_message_chain(:pool, :db_config, :name).and_return(db_config_name)
end
it 'queries connection db role' do
diff --git a/spec/lib/gitlab/multi_collection_paginator_spec.rb b/spec/lib/gitlab/multi_collection_paginator_spec.rb
index c7c8f4f969f..080b3382684 100644
--- a/spec/lib/gitlab/multi_collection_paginator_spec.rb
+++ b/spec/lib/gitlab/multi_collection_paginator_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Gitlab::MultiCollectionPaginator do
end
it 'fills the last page with elements from the second collection' do
- expected_collection = all_groups[-2..-1]
+ expected_collection = all_groups[-2..]
expect(paginator.paginate(3)).to eq(expected_collection)
end
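Several hunks in this patch replace the explicit -1 end bound with Ruby 2.6+ endless ranges; the two spellings slice identically, so only the style changes. A quick illustration with throwaway data:

groups = %w[a b c d e]

groups[-2..-1] # => ["d", "e"]
groups[-2..]   # => ["d", "e"]
groups[2..]    # => ["c", "d", "e"]

throttles = { one: 1, two: 2, three: 3 }
throttles.keys[2..] # => [:three]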
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index 3c14d91fdfd..1bed8e542a2 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
it do
- expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject
+ expect(subject).to eq(expected.reverse[1..]) # removing one item because we used it to calculate cursor data for the "last" page in subject
end
end
end
diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
index ffecbb06ff8..f8d50fbc517 100644
--- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb
+++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
context 'when the api_kaminari_count_with_limit feature flag is enabled' do
before do
- stub_feature_flags(api_kaminari_count_with_limit: true, lower_relation_max_count_limit: false)
+ stub_feature_flags(api_kaminari_count_with_limit: true)
end
context 'when resources count is less than MAX_COUNT_LIMIT' do
@@ -120,41 +120,6 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
end
end
- context 'when lower_relation_max_count_limit FF is enabled' do
- before do
- stub_feature_flags(lower_relation_max_count_limit: true)
- end
-
- it_behaves_like 'paginated response'
- it_behaves_like 'response with pagination headers'
-
- context 'when limit is met' do
- before do
- stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_NEW_LOWER_LIMIT", 2)
- end
-
- it_behaves_like 'paginated response'
-
- it 'does not return the X-Total and X-Total-Pages headers' do
- expect_no_header('X-Total')
- expect_no_header('X-Total-Pages')
- expect_header('X-Per-Page', '2')
- expect_header('X-Page', '1')
- expect_header('X-Next-Page', '2')
- expect_header('X-Prev-Page', '')
-
- expect_header('Link', anything) do |_key, val|
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
- expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next"))
- expect(val).not_to include('rel="last"')
- expect(val).not_to include('rel="prev"')
- end
-
- subject.paginate(resource)
- end
- end
- end
-
it 'does not return the total headers when excluding them' do
expect_no_header('X-Total')
expect_no_header('X-Total-Pages')
diff --git a/spec/lib/gitlab/patch/legacy_database_config_spec.rb b/spec/lib/gitlab/patch/legacy_database_config_spec.rb
index e6c0bdbf360..b87e16f31ae 100644
--- a/spec/lib/gitlab/patch/legacy_database_config_spec.rb
+++ b/spec/lib/gitlab/patch/legacy_database_config_spec.rb
@@ -11,6 +11,9 @@ RSpec.describe Gitlab::Patch::LegacyDatabaseConfig do
let(:configuration) { Rails::Application::Configuration.new(Rails.root) }
before do
+ allow(File).to receive(:exist?).and_call_original
+ allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false)
+
# The `AS::ConfigurationFile` calls `read` in `def initialize`
# thus we cannot use `expect_next_instance_of`
# rubocop:disable RSpec/AnyInstanceOf
diff --git a/spec/lib/gitlab/process_management_spec.rb b/spec/lib/gitlab/process_management_spec.rb
new file mode 100644
index 00000000000..a71a476b540
--- /dev/null
+++ b/spec/lib/gitlab/process_management_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require_relative '../../../lib/gitlab/process_management'
+
+RSpec.describe Gitlab::ProcessManagement do
+ describe '.trap_signals' do
+ it 'traps the given signals' do
+ expect(described_class).to receive(:trap).ordered.with(:INT)
+ expect(described_class).to receive(:trap).ordered.with(:HUP)
+
+ described_class.trap_signals(%i(INT HUP))
+ end
+ end
+
+ describe '.modify_signals' do
+ it 'traps the given signals with the given command' do
+ expect(described_class).to receive(:trap).ordered.with(:INT, 'DEFAULT')
+ expect(described_class).to receive(:trap).ordered.with(:HUP, 'DEFAULT')
+
+ described_class.modify_signals(%i(INT HUP), 'DEFAULT')
+ end
+ end
+
+ describe '.signal_processes' do
+ it 'sends a signal to every given process' do
+ expect(described_class).to receive(:signal).with(1, :INT)
+
+ described_class.signal_processes([1], :INT)
+ end
+ end
+
+ describe '.signal' do
+ it 'sends a signal to the given process' do
+ allow(Process).to receive(:kill).with(:INT, 4)
+ expect(described_class.signal(4, :INT)).to eq(true)
+ end
+
+ it 'returns false when the process does not exist' do
+ allow(Process).to receive(:kill).with(:INT, 4).and_raise(Errno::ESRCH)
+ expect(described_class.signal(4, :INT)).to eq(false)
+ end
+ end
+
+ describe '.wait_async' do
+ it 'waits for a process in a separate thread' do
+ thread = described_class.wait_async(Process.spawn('true'))
+
+ # Upon success Process.wait just returns the PID.
+ expect(thread.value).to be_a_kind_of(Numeric)
+ end
+ end
+
+ # In the X_alive? checks, we check negative PIDs sometimes as a simple way
+ # to be sure the pids are definitely for non-existent processes.
+ # Note that -1 is special, and sends the signal to every process we have permission
+ # for, so we use -2, -3 etc
+ describe '.all_alive?' do
+ it 'returns true if all processes are alive' do
+ processes = [Process.pid]
+
+ expect(described_class.all_alive?(processes)).to eq(true)
+ end
+
+ it 'returns false when a thread was not alive' do
+ processes = [-2]
+
+ expect(described_class.all_alive?(processes)).to eq(false)
+ end
+ end
+
+ describe '.process_alive?' do
+ it 'returns true if the process is alive' do
+ process = Process.pid
+
+ expect(described_class.process_alive?(process)).to eq(true)
+ end
+
+ it 'returns false when a thread was not alive' do
+ process = -2
+
+ expect(described_class.process_alive?(process)).to eq(false)
+ end
+
+ it 'returns false when no pid is given' do
+ process = nil
+
+ expect(described_class.process_alive?(process)).to eq(false)
+ end
+ end
+
+ describe '.process_died?' do
+ it 'returns false if the process is alive' do
+ process = Process.pid
+
+ expect(described_class.process_died?(process)).to eq(false)
+ end
+
+ it 'returns true when a thread was not alive' do
+ process = -2
+
+ expect(described_class.process_died?(process)).to eq(true)
+ end
+
+ it 'returns true when no pid is given' do
+ process = nil
+
+ expect(described_class.process_died?(process)).to eq(true)
+ end
+ end
+
+ describe '.pids_alive' do
+ it 'returns the pids that are alive, from a given array' do
+ pids = [Process.pid, -2]
+
+ expect(described_class.pids_alive(pids)).to match_array([Process.pid])
+ end
+ end
+
+ describe '.any_alive?' do
+ it 'returns true if at least one process is alive' do
+ processes = [Process.pid, -2]
+
+ expect(described_class.any_alive?(processes)).to eq(true)
+ end
+
+ it 'returns false when all threads are dead' do
+ processes = [-2, -3]
+
+ expect(described_class.any_alive?(processes)).to eq(false)
+ end
+ end
+
+ describe '.write_pid' do
+ it 'writes the PID of the current process to the given file' do
+ handle = double(:handle)
+
+ allow(File).to receive(:open).with('/dev/null', 'w').and_yield(handle)
+
+ expect(handle).to receive(:write).with(Process.pid.to_s)
+
+ described_class.write_pid('/dev/null')
+ end
+ end
+end
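The liveness checks exercised above (including the negative-PID trick described in the comment) are typically built on Process.kill with signal 0, which only performs error checking. A hedged sketch of such a helper, not the ProcessManagement implementation itself:

def probably_alive?(pid)
  return false if pid.nil?

  Process.kill(0, pid) # signal 0: check existence/permission without delivering a signal
  true
rescue Errno::ESRCH    # no such process (or process group, for negative pids)
  false
rescue Errno::EPERM    # the process exists but belongs to another user
  true
end

probably_alive?(Process.pid) # => true
probably_alive?(-2)          # => false unless a process group with ID 2 exists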
diff --git a/spec/lib/gitlab/quick_actions/dsl_spec.rb b/spec/lib/gitlab/quick_actions/dsl_spec.rb
index f990abfb253..942d347424f 100644
--- a/spec/lib/gitlab/quick_actions/dsl_spec.rb
+++ b/spec/lib/gitlab/quick_actions/dsl_spec.rb
@@ -96,8 +96,8 @@ RSpec.describe Gitlab::QuickActions::Dsl do
expect(dynamic_description_def.name).to eq(:dynamic_description)
expect(dynamic_description_def.aliases).to eq([])
- expect(dynamic_description_def.to_h(OpenStruct.new(noteable: 'issue'))[:description]).to eq('A dynamic description for ISSUE')
- expect(dynamic_description_def.execute_message(OpenStruct.new(noteable: 'issue'), 'arg')).to eq('A dynamic execution message for ISSUE passing arg')
+ expect(dynamic_description_def.to_h(double('desc', noteable: 'issue'))[:description]).to eq('A dynamic description for ISSUE')
+ expect(dynamic_description_def.execute_message(double('desc', noteable: 'issue'), 'arg')).to eq('A dynamic execution message for ISSUE passing arg')
expect(dynamic_description_def.params).to eq(['The first argument', 'The second argument'])
expect(dynamic_description_def.condition_block).to be_nil
expect(dynamic_description_def.types).to eq([])
diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb
index 8f03905e08d..39ea02bad8b 100644
--- a/spec/lib/gitlab/rack_attack_spec.rb
+++ b/spec/lib/gitlab/rack_attack_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::RackAttack, :aggregate_failures do
describe '.configure' do
let(:fake_rack_attack) { class_double("Rack::Attack") }
- let(:fake_rack_attack_request) { class_double("Rack::Attack::Request") }
- let(:fake_cache) { instance_double("Rack::Attack::Cache") }
+ let(:fake_rack_attack_request) { class_double(Rack::Attack::Request) }
+ let(:fake_cache) { instance_double(Rack::Attack::Cache) }
let(:throttles) do
{
@@ -27,9 +27,6 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
end
before do
- stub_const("Rack::Attack", fake_rack_attack)
- stub_const("Rack::Attack::Request", fake_rack_attack_request)
-
allow(fake_rack_attack).to receive(:throttled_response=)
allow(fake_rack_attack).to receive(:throttle)
allow(fake_rack_attack).to receive(:track)
@@ -37,6 +34,9 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
allow(fake_rack_attack).to receive(:blocklist)
allow(fake_rack_attack).to receive(:cache).and_return(fake_cache)
allow(fake_cache).to receive(:store=)
+
+ fake_rack_attack.const_set('Request', fake_rack_attack_request)
+ stub_const("Rack::Attack", fake_rack_attack)
end
it 'extends the request class' do
@@ -78,7 +78,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
it 'configures tracks and throttles with a selected set of dry-runs' do
dry_run_throttles = throttles.each_key.first(2)
- regular_throttles = throttles.keys[2..-1]
+ regular_throttles = throttles.keys[2..]
stub_env('GITLAB_THROTTLE_DRY_RUN', dry_run_throttles.join(','))
described_class.configure(fake_rack_attack)
diff --git a/spec/lib/gitlab/rate_limit_helpers_spec.rb b/spec/lib/gitlab/rate_limit_helpers_spec.rb
deleted file mode 100644
index ad0e2de1448..00000000000
--- a/spec/lib/gitlab/rate_limit_helpers_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_rate_limiting do
- let(:limiter_class) do
- Class.new do
- include ::Gitlab::RateLimitHelpers
-
- attr_reader :request
-
- def initialize(request)
- @request = request
- end
- end
- end
-
- let(:request) { instance_double(ActionDispatch::Request, request_method: 'GET', ip: '127.0.0.1', fullpath: '/') }
- let(:class_instance) { limiter_class.new(request) }
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
-
- describe '#archive_rate_limit_reached?' do
- context 'with a user' do
- it 'rate limits the user properly' do
- 5.times do
- expect(class_instance.archive_rate_limit_reached?(user, project)).to be_falsey
- end
-
- expect(class_instance.archive_rate_limit_reached?(user, project)).to be_truthy
- end
- end
-
- context 'with an anonymous user' do
- before do
- stub_const('Gitlab::RateLimitHelpers::ARCHIVE_RATE_ANONYMOUS_THRESHOLD', 2)
- end
-
- it 'rate limits with higher limits' do
- 2.times do
- expect(class_instance.archive_rate_limit_reached?(nil, project)).to be_falsey
- end
-
- expect(class_instance.archive_rate_limit_reached?(nil, project)).to be_truthy
- expect(class_instance.archive_rate_limit_reached?(user, project)).to be_falsey
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index bf1bf65bb9b..76731bb916c 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -27,6 +27,11 @@ RSpec.describe Gitlab::Redis::MultiStore do
subject { multi_store.send(name, *args) }
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
after(:all) do
primary_store.flushdb
secondary_store.flushdb
@@ -48,6 +53,15 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
end
+ context 'when instance_name is nil' do
+ let(:instance_name) { nil }
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
+ end
+ end
+
context 'when primary_store is not a ::Redis instance' do
before do
allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
@@ -114,6 +128,12 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
RSpec.shared_examples_for 'fallback read from the secondary store' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
it 'fallback and execute on secondary instance' do
expect(secondary_store).to receive(name).with(*args).and_call_original
@@ -128,7 +148,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
it 'increment read fallback count metrics' do
- expect(multi_store).to receive(:increment_read_fallback_count).with(name)
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
subject
end
@@ -169,9 +189,9 @@ RSpec.describe Gitlab::Redis::MultiStore do
allow(secondary_store).to receive(name).and_call_original
end
- context 'with feature flag :use_multi_store enabled' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
before do
- stub_feature_flags(use_multi_store: true)
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
end
context 'when reading from the primary is successful' do
@@ -246,12 +266,38 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
end
- context 'with feature flag :use_multi_store is disabled' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
before do
- stub_feature_flags(use_multi_store: false)
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
end
- it_behaves_like 'secondary store'
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it_behaves_like 'secondary store'
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'execute on the primary instance' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+ end
end
context 'with both primary and secondary store using same redis instance' do
@@ -329,9 +375,9 @@ RSpec.describe Gitlab::Redis::MultiStore do
allow(secondary_store).to receive(name).and_call_original
end
- context 'with feature flag :use_multi_store enabled' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
before do
- stub_feature_flags(use_multi_store: true)
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
end
context 'when executing on primary instance is successful' do
@@ -382,35 +428,57 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
end
- context 'with feature flag :use_multi_store is disabled' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
before do
- stub_feature_flags(use_multi_store: false)
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
end
- it 'executes only on the secondary redis store', :aggregate_errors do
- expect(secondary_store).to receive(name).with(*expected_args)
- expect(primary_store).not_to receive(name).with(*expected_args)
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on the secondary redis store', :aggregate_failures do
+ expect(secondary_store).to receive(name).with(*expected_args)
+ expect(primary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
- subject
+ include_examples 'verify that store contains values', :secondary_store
end
- include_examples 'verify that store contains values', :secondary_store
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'executes only on the primary redis store', :aggregate_failures do
+ expect(primary_store).to receive(name).with(*expected_args)
+ expect(secondary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ end
end
end
end
end
context 'with unsupported command' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
before do
primary_store.flushdb
secondary_store.flushdb
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
end
let_it_be(:key) { "redis:counter" }
- subject do
- multi_store.incr(key)
- end
+ subject { multi_store.incr(key) }
it 'executes method missing' do
expect(multi_store).to receive(:method_missing)
@@ -418,31 +486,75 @@ RSpec.describe Gitlab::Redis::MultiStore do
subject
end
- it 'logs MethodMissingError' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
- hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name)))
+ context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ it 'logs MethodMissingError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
+ hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name)))
- subject
+ subject
+ end
+
+ it 'increments method missing counter' do
+ expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)
+
+ subject
+ end
end
- it 'increments method missing counter' do
- expect(multi_store).to receive(:increment_method_missing_count).with(:incr)
+ context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ subject { multi_store.info }
- subject
+ it 'does not log MethodMissingError' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ subject
+ end
+
+ it 'does not increment method missing counter' do
+ expect(counter).not_to receive(:increment)
+
+ subject
+ end
end
- it 'fallback and executes only on the secondary store', :aggregate_errors do
- expect(secondary_store).to receive(:incr).with(key).and_call_original
- expect(primary_store).not_to receive(:incr)
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'falls back and executes only on the primary store', :aggregate_failures do
+ expect(primary_store).to receive(:incr).with(key).and_call_original
+ expect(secondary_store).not_to receive(:incr)
- subject
+ subject
+ end
+
+ it 'stores the correct value on the primary store', :aggregate_failures do
+ subject
+
+ expect(secondary_store.get(key)).to be_nil
+ expect(primary_store.get(key)).to eq('1')
+ end
end
- it 'correct value is stored on the secondary store', :aggregate_errors do
- subject
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'falls back and executes only on the secondary store', :aggregate_failures do
+ expect(secondary_store).to receive(:incr).with(key).and_call_original
+ expect(primary_store).not_to receive(:incr)
+
+ subject
+ end
+
+ it 'stores the correct value on the secondary store', :aggregate_failures do
+ subject
- expect(primary_store.get(key)).to be_nil
- expect(secondary_store.get(key)).to eq('1')
+ expect(primary_store.get(key)).to be_nil
+ expect(secondary_store.get(key)).to eq('1')
+ end
end
context 'when the command is executed within pipelined block' do
@@ -468,6 +580,96 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
end
+ describe '#to_s' do
+ subject { multi_store.to_s }
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'returns same value as secondary_store' do
+ is_expected.to eq(secondary_store.to_s)
+ end
+ end
+ end
+ end
+
+ describe '#is_a?' do
+ it 'returns true for ::Redis::Store' do
+ expect(multi_store.is_a?(::Redis::Store)).to be true
+ end
+ end
+
+ describe '#use_primary_and_secondary_stores?' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ it 'multi store is enabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ describe '#use_primary_store_as_default?' do
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'primary store is used as default' do
+ expect(multi_store.use_primary_store_as_default?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'primary store is not used as default' do
+ expect(multi_store.use_primary_store_as_default?).to be false
+ end
+ end
+ end
+
def create_redis_store(options, extras = {})
::Redis::Store.new(options.merge(extras))
end
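The feature-flag plumbing that the examples above rely on can be read as flag-driven routing between the two stores. A rough, standalone sketch (the class, the Feature stub, and the default_store helper below are inferred from the spec's flag names, not taken from the real Gitlab::Redis::MultiStore):

    module Feature
      FLAGS = {
        use_primary_and_secondary_stores_for_sessions: true,
        use_primary_store_as_default_for_sessions: false
      }.freeze

      def self.enabled?(name)
        FLAGS.fetch(name.to_sym, false)
      end
    end

    class MultiStoreSketch
      attr_reader :primary_store, :secondary_store, :instance_name

      def initialize(primary_store, secondary_store, instance_name)
        raise ArgumentError, 'instance_name is required' if instance_name.nil?

        @primary_store = primary_store
        @secondary_store = secondary_store
        @instance_name = instance_name
      end

      def use_primary_and_secondary_stores?
        Feature.enabled?(:"use_primary_and_secondary_stores_for_#{instance_name.downcase}")
      end

      def use_primary_store_as_default?
        Feature.enabled?(:"use_primary_store_as_default_for_#{instance_name.downcase}")
      end

      # Single-store operations (to_s, unsupported commands, and so on) go here.
      def default_store
        use_primary_store_as_default? ? primary_store : secondary_store
      end
    end

    store = MultiStoreSketch.new(:primary, :secondary, 'Sessions')
    store.use_primary_and_secondary_stores? # => true
    store.default_store                     # => :secondary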
diff --git a/spec/lib/gitlab/redis/sessions_spec.rb b/spec/lib/gitlab/redis/sessions_spec.rb
index 7e239c08e9f..6ecbbf3294d 100644
--- a/spec/lib/gitlab/redis/sessions_spec.rb
+++ b/spec/lib/gitlab/redis/sessions_spec.rb
@@ -3,5 +3,90 @@
require 'spec_helper'
RSpec.describe Gitlab::Redis::Sessions do
- include_examples "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState
+ it_behaves_like "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState
+
+ describe 'redis instance used in connection pool' do
+ before do
+ clear_pool
+ end
+
+ after do
+ clear_pool
+ end
+
+ context 'when redis.sessions configuration is not provided' do
+ it 'uses ::Redis instance' do
+ expect(described_class).to receive(:config_fallback?).and_return(true)
+
+ described_class.pool.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Redis)
+ end
+ end
+ end
+
+ context 'when redis.sessions configuration is provided' do
+ it 'instantiates an instance of MultiStore' do
+ expect(described_class).to receive(:config_fallback?).and_return(false)
+
+ described_class.pool.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+ end
+ end
+ end
+
+ def clear_pool
+ described_class.remove_instance_variable(:@pool)
+ rescue NameError
+ # raised if @pool was not set; ignore
+ end
+ end
+
+ describe '.store' do
+ subject(:store) { described_class.store(namespace: described_class::SESSION_NAMESPACE) }
+
+ context 'when redis.sessions configuration is NOT provided' do
+ it 'instantiates ::Redis instance' do
+ expect(described_class).to receive(:config_fallback?).and_return(true)
+ expect(store).to be_instance_of(::Redis::Store)
+ end
+ end
+
+ context 'when redis.sessions configuration is provided' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ before do
+ redis_clear_raw_config!(Gitlab::Redis::Sessions)
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ allow(described_class).to receive(:config_fallback?).and_return(false)
+ end
+
+ after do
+ redis_clear_raw_config!(Gitlab::Redis::Sessions)
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ end
+
+ # Check that Gitlab::Redis::Sessions is configured as MultiStore with proper attrs.
+ it 'instantiates an instance of MultiStore', :aggregate_failures do
+ expect(described_class).to receive(:config_file_name).and_return(config_new_format_host)
+ expect(::Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
+
+ expect(store).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(store.primary_store.to_s).to eq("Redis Client connected to test-host:6379 against DB 99 with namespace session:gitlab")
+ expect(store.secondary_store.to_s).to eq("Redis Client connected to /path/to/redis.sock against DB 0 with namespace session:gitlab")
+
+ expect(store.instance_name).to eq('Sessions')
+ end
+
+ context 'when MultiStore is correctly configured' do
+ before do
+ allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(::Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sessions, :use_primary_store_as_default_for_sessions
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 05f1c88a6ab..83f85cc73d0 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -264,23 +264,37 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('1.2.3') }
end
- describe '.conan_recipe_component_regex' do
- subject { described_class.conan_recipe_component_regex }
-
- let(:fifty_one_characters) { 'f_a' * 17}
-
- it { is_expected.to match('foobar') }
- it { is_expected.to match('foo_bar') }
- it { is_expected.to match('foo+bar') }
- it { is_expected.to match('_foo+bar-baz+1.0') }
- it { is_expected.to match('1.0.0') }
- it { is_expected.not_to match('-foo_bar') }
- it { is_expected.not_to match('+foo_bar') }
- it { is_expected.not_to match('.foo_bar') }
- it { is_expected.not_to match('foo@bar') }
- it { is_expected.not_to match('foo/bar') }
- it { is_expected.not_to match('!!()()') }
- it { is_expected.not_to match(fifty_one_characters) }
+ context 'conan recipe components' do
+ shared_examples 'accepting valid recipe components values' do
+ let(:fifty_one_characters) { 'f_a' * 17}
+
+ it { is_expected.to match('foobar') }
+ it { is_expected.to match('foo_bar') }
+ it { is_expected.to match('foo+bar') }
+ it { is_expected.to match('_foo+bar-baz+1.0') }
+ it { is_expected.to match('1.0.0') }
+ it { is_expected.not_to match('-foo_bar') }
+ it { is_expected.not_to match('+foo_bar') }
+ it { is_expected.not_to match('.foo_bar') }
+ it { is_expected.not_to match('foo@bar') }
+ it { is_expected.not_to match('foo/bar') }
+ it { is_expected.not_to match('!!()()') }
+ it { is_expected.not_to match(fifty_one_characters) }
+ end
+
+ describe '.conan_recipe_component_regex' do
+ subject { described_class.conan_recipe_component_regex }
+
+ it_behaves_like 'accepting valid recipe components values'
+ it { is_expected.not_to match('_') }
+ end
+
+ describe '.conan_recipe_user_channel_regex' do
+ subject { described_class.conan_recipe_user_channel_regex }
+
+ it_behaves_like 'accepting valid recipe components values'
+ it { is_expected.to match('_') }
+ end
end
describe '.package_name_regex' do
diff --git a/spec/lib/gitlab/relative_positioning/range_spec.rb b/spec/lib/gitlab/relative_positioning/range_spec.rb
index c3386336493..da1f0166d5d 100644
--- a/spec/lib/gitlab/relative_positioning/range_spec.rb
+++ b/spec/lib/gitlab/relative_positioning/range_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::RelativePositioning::Range do
- item_a = OpenStruct.new(relative_position: 100, object: :x, positioned?: true)
- item_b = OpenStruct.new(relative_position: 200, object: :y, positioned?: true)
+ position_struct = Struct.new(:relative_position, :object, :positioned?)
+
+ item_a = position_struct.new(100, :x, true)
+ item_b = position_struct.new(200, :y, true)
before do
allow(item_a).to receive(:lhs_neighbour) { nil }
@@ -90,12 +92,12 @@ RSpec.describe Gitlab::RelativePositioning::Range do
end
describe '#cover?' do
- item_c = OpenStruct.new(relative_position: 150, object: :z, positioned?: true)
- item_d = OpenStruct.new(relative_position: 050, object: :w, positioned?: true)
- item_e = OpenStruct.new(relative_position: 250, object: :r, positioned?: true)
- item_f = OpenStruct.new(positioned?: false)
- item_ax = OpenStruct.new(relative_position: 100, object: :not_x, positioned?: true)
- item_bx = OpenStruct.new(relative_position: 200, object: :not_y, positioned?: true)
+ item_c = position_struct.new(150, :z, true)
+ item_d = position_struct.new(050, :w, true)
+ item_e = position_struct.new(250, :r, true)
+ item_f = position_struct.new(nil, nil, false)
+ item_ax = position_struct.new(100, :not_x, true)
+ item_bx = position_struct.new(200, :not_y, true)
where(:lhs, :rhs, :item, :expected_result) do
[
diff --git a/spec/lib/gitlab/repository_archive_rate_limiter_spec.rb b/spec/lib/gitlab/repository_archive_rate_limiter_spec.rb
new file mode 100644
index 00000000000..49df70f3cb3
--- /dev/null
+++ b/spec/lib/gitlab/repository_archive_rate_limiter_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::RepositoryArchiveRateLimiter do
+ let(:described_class) do
+ Class.new do
+ include ::Gitlab::RepositoryArchiveRateLimiter
+
+ def check_rate_limit!(**args)
+ end
+ end
+ end
+
+ describe "#check_archive_rate_limit!" do
+ let(:project) { instance_double('Project') }
+ let(:current_user) { instance_double('User') }
+ let(:check) { subject.check_archive_rate_limit!(current_user, project) }
+
+ context 'when archive_rate_limit feature flag is disabled' do
+ before do
+ stub_feature_flags(archive_rate_limit: false)
+ end
+
+ it 'does not check rate limit' do
+ expect(subject).not_to receive(:check_rate_limit!)
+
+ expect(check).to eq nil
+ end
+ end
+
+ context 'when archive_rate_limit feature flag is enabled' do
+ before do
+ stub_feature_flags(archive_rate_limit: true)
+ end
+
+ context 'when current user exists' do
+ it 'checks for project_repositories_archive rate limiting with default threshold' do
+ expect(subject).to receive(:check_rate_limit!)
+ .with(:project_repositories_archive, scope: [project, current_user], threshold: nil)
+ check
+ end
+ end
+
+ context 'when current user does not exist' do
+ let(:current_user) { nil }
+
+ it 'checks for project_repositories_archive rate limiting with threshold 100' do
+ expect(subject).to receive(:check_rate_limit!)
+ .with(:project_repositories_archive, scope: [project, current_user], threshold: 100)
+ check
+ end
+ end
+ end
+ end
+end
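The expectations above pin down only a little branching: skip when the flag is off, use the default threshold for signed-in users, and a stricter threshold for anonymous ones. A hedged, standalone sketch (the Feature stub, the printing check_rate_limit!, and the constant name are stand-ins, not GitLab code):

    module Feature
      def self.enabled?(_flag)
        true
      end
    end

    module ArchiveRateLimiterSketch
      ANONYMOUS_THRESHOLD = 100 # stricter limit for anonymous downloads

      def check_archive_rate_limit!(current_user, project)
        return unless Feature.enabled?(:archive_rate_limit)

        threshold = current_user ? nil : ANONYMOUS_THRESHOLD

        check_rate_limit!(:project_repositories_archive,
                          scope: [project, current_user],
                          threshold: threshold)
      end
    end

    class FakeController
      include ArchiveRateLimiterSketch

      def check_rate_limit!(key, **opts)
        puts "rate limiting #{key} with #{opts.inspect}"
      end
    end

    FakeController.new.check_archive_rate_limit!(nil, :project)
    # prints the rate limit key with scope [:project, nil] and threshold 100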
diff --git a/spec/lib/gitlab/saas_spec.rb b/spec/lib/gitlab/saas_spec.rb
new file mode 100644
index 00000000000..1be36a60a97
--- /dev/null
+++ b/spec/lib/gitlab/saas_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Saas do
+ describe '.canary_toggle_com_url' do
+ subject { described_class.canary_toggle_com_url }
+
+ let(:next_url) { 'https://next.gitlab.com' }
+
+ it { is_expected.to eq(next_url) }
+ end
+end
diff --git a/spec/lib/gitlab/search/abuse_detection_spec.rb b/spec/lib/gitlab/search/abuse_detection_spec.rb
new file mode 100644
index 00000000000..a18d28456cd
--- /dev/null
+++ b/spec/lib/gitlab/search/abuse_detection_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Search::AbuseDetection do
+ subject { described_class.new(params) }
+
+ let(:params) {{ query_string: 'foobar' }}
+
+ describe 'abusive scopes validation' do
+ it 'allows only approved scopes' do
+ described_class::ALLOWED_SCOPES.each do |scope|
+ expect(described_class.new(scope: scope)).to be_valid
+ end
+ end
+
+ it 'disallows anything not approved' do
+ expect(described_class.new(scope: 'nope')).not_to be_valid
+ end
+ end
+
+ describe 'abusive character matching' do
+ refs = %w(
+ main
+ тест
+ maiñ
+ main123
+ main-v123
+ main-v12.3
+ feature/it_works
+ really_important!
+ 测试
+ )
+
+ refs.each do |ref|
+ it "does match refs permitted by git refname: #{ref}" do
+ [:repository_ref, :project_ref].each do |param|
+ validation = described_class.new(Hash[param, ref])
+ expect(validation).to be_valid
+ end
+ end
+
+ it "does NOT match refs with special characters: #{ref}" do
+ ['?', '\\', ' '].each do |special_character|
+ [:repository_ref, :project_ref].each do |param|
+ validation = described_class.new(Hash[param, ref + special_character])
+ expect(validation).not_to be_valid
+ end
+ end
+ end
+ end
+ end
+
+ describe 'numericality validation' do
+ it 'considers non-Integers to be invalid' do
+ [:project_id, :group_id].each do |param|
+ [[1, 2, 3], 'xyz', 3.14, { foo: :bar }].each do |dtype|
+ expect(described_class.new(param => dtype)).not_to be_valid
+ end
+ end
+ end
+
+ it 'considers Integers to be valid' do
+ [:project_id, :group_id].each do |param|
+ expect(described_class.new(param => 123)).to be_valid
+ end
+ end
+ end
+
+ describe 'query_string validation' do
+ using ::RSpec::Parameterized::TableSyntax
+
+ subject { described_class.new(query_string: search) }
+
+ let(:validation_errors) do
+ subject.validate
+ subject.errors.messages
+ end
+
+ where(:search, :errors) do
+ described_class::STOP_WORDS.each do |word|
+ word | { query_string: ['stopword only abusive search detected'] }
+ end
+
+ 'x' | { query_string: ['abusive tiny search detected'] }
+ ('x' * described_class::ABUSIVE_TERM_SIZE) | { query_string: ['abusive term length detected'] }
+ '' | {}
+ '*' | {}
+ 'ruby' | {}
+ end
+
+ with_them do
+ it 'validates query string for pointless search' do
+ expect(validation_errors).to eq(errors)
+ end
+ end
+ end
+
+ describe 'abusive type coercion from string validation' do
+ it 'considers anything not a String invalid' do
+ [:query_string, :scope, :repository_ref, :project_ref].each do |param|
+ [[1, 2, 3], 123, 3.14, { foo: :bar }].each do |dtype|
+ expect(described_class.new(param => dtype)).not_to be_valid
+ end
+ end
+ end
+
+ it 'considers Strings to be valid' do
+ [:query_string, :repository_ref, :project_ref].each do |param|
+ expect(described_class.new(param => "foo")).to be_valid
+ end
+ end
+ end
+end
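Taken together, these examples describe a handful of validation layers: an allow-list of scopes, integer-only ids, ref names without shell-ish characters, and limits on the query string. A rough, standalone sketch built on ActiveModel::Validations (the scope list, ref pattern, and term size below are illustrative stand-ins, not the real constants):

    require 'active_model'

    class AbuseDetectionSketch
      include ActiveModel::Validations

      ALLOWED_SCOPES = %w[issues merge_requests blobs].freeze # illustrative subset
      REF_FORMAT = /\A[^\s\\?]+\z/.freeze                     # rejects spaces, '\' and '?'
      ABUSIVE_TERM_SIZE = 100                                 # made-up value

      attr_reader :scope, :query_string, :project_id, :group_id, :repository_ref

      validates :scope, inclusion: { in: ALLOWED_SCOPES }, allow_nil: true
      validates :project_id, :group_id, numericality: { only_integer: true }, allow_nil: true
      validates :repository_ref, format: { with: REF_FORMAT }, allow_nil: true
      validates :query_string, length: { maximum: ABUSIVE_TERM_SIZE - 1,
                                         message: 'abusive term length detected' },
                               allow_nil: true

      def initialize(params = {})
        params.each { |key, value| instance_variable_set(:"@#{key}", value) }
      end
    end

    AbuseDetectionSketch.new(scope: 'nope').valid?                      # => false
    AbuseDetectionSketch.new(repository_ref: 'feature/it_works').valid? # => true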
diff --git a/spec/lib/gitlab/search/abuse_validators/no_abusive_coercion_from_string_validator_spec.rb b/spec/lib/gitlab/search/abuse_validators/no_abusive_coercion_from_string_validator_spec.rb
new file mode 100644
index 00000000000..76280e65867
--- /dev/null
+++ b/spec/lib/gitlab/search/abuse_validators/no_abusive_coercion_from_string_validator_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Search::AbuseValidators::NoAbusiveCoercionFromStringValidator do
+ subject do
+ described_class.new({ attributes: { foo: :bar } })
+ end
+
+ let(:instance) { double(:instance) }
+ let(:attribute) { :attribute }
+ let(:validation_msg) { 'abusive coercion from string detected' }
+ let(:validate) { subject.validate_each(instance, attribute, attribute_value) }
+
+ using ::RSpec::Parameterized::TableSyntax
+
+ where(:attribute_value, :valid?) do
+ ['this is an array'] | false
+ { 'this': 'is a hash' } | false
+ 123 | false
+ 456.78 | false
+ 'now this is a string' | true
+ end
+
+ with_them do
+ it do
+ if valid?
+ expect(instance).not_to receive(:errors)
+ else
+ expect(instance).to receive_message_chain(:errors, :add).with(attribute, validation_msg)
+ validate
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/search/abuse_validators/no_abusive_term_length_validator_spec.rb b/spec/lib/gitlab/search/abuse_validators/no_abusive_term_length_validator_spec.rb
new file mode 100644
index 00000000000..67409d9b628
--- /dev/null
+++ b/spec/lib/gitlab/search/abuse_validators/no_abusive_term_length_validator_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Search::AbuseValidators::NoAbusiveTermLengthValidator do
+ subject do
+ described_class.new({ attributes: { foo: :bar }, maximum: limit, maximum_for_url: url_limit })
+ end
+
+ let(:limit) { 100 }
+ let(:url_limit) { limit * 2 }
+ let(:instance) { double(:instance) }
+ let(:attribute) { :search }
+ let(:validation_msg) { 'abusive term length detected' }
+ let(:validate) { subject.validate_each(instance, attribute, search) }
+
+ context 'when a term is over the limit' do
+ let(:search) { "this search is too lo#{'n' * limit}g" }
+
+ it 'adds a validation error' do
+ expect(instance).to receive_message_chain(:errors, :add).with(attribute, validation_msg)
+ validate
+ end
+ end
+
+ context 'when all terms are under the limit' do
+ let(:search) { "what is love? baby don't hurt me" }
+
+ it 'does NOT add any validation errors' do
+ expect(instance).not_to receive(:errors)
+ validate
+ end
+ end
+
+ context 'when a URL is detected in a search term' do
+ let(:double_limit) { limit * 2 }
+ let(:terms) do
+ [
+ 'http://' + 'x' * (double_limit - 12) + '.com',
+ 'https://' + 'x' * (double_limit - 13) + '.com',
+ 'sftp://' + 'x' * (double_limit - 12) + '.com',
+ 'ftp://' + 'x' * (double_limit - 11) + '.com',
+ 'http://' + 'x' * (double_limit - 8) # no tld is OK
+ ]
+ end
+
+ context 'when under twice the limit' do
+ let(:search) { terms.join(' ') }
+
+ it 'does NOT add any validation errors' do
+ search.split.each do |term|
+ expect(term.length).to be < url_limit
+ end
+
+ expect(instance).not_to receive(:errors)
+ validate
+ end
+ end
+
+ context 'when over twice the limit' do
+ let(:search) do
+ terms.map { |t| t + 'xxxxxxxx' }.join(' ')
+ end
+
+ it 'adds a validation error' do
+ expect(instance).to receive_message_chain(:errors, :add).with(attribute, validation_msg)
+ validate
+ end
+ end
+ end
+end
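A condensed sketch of the behaviour covered above, written as an ActiveModel::EachValidator; the structure is assumed, and only the maximum / maximum_for_url options and the error message come from the spec:

    require 'active_model'

    class TermLengthValidatorSketch < ActiveModel::EachValidator
      URL_PREFIX = %r{\A\w+://}.freeze

      def validate_each(record, attribute, value)
        abusive = value.to_s.split.any? do |term|
          # URL-looking terms get the more generous limit
          limit = term.match?(URL_PREFIX) ? options[:maximum_for_url] : options[:maximum]
          term.length > limit
        end

        record.errors.add(attribute, 'abusive term length detected') if abusive
      end
    end

    # Instantiated the same way the spec does:
    TermLengthValidatorSketch.new(attributes: [:search], maximum: 100, maximum_for_url: 200)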
diff --git a/spec/lib/gitlab/search/params_spec.rb b/spec/lib/gitlab/search/params_spec.rb
new file mode 100644
index 00000000000..6d15337b872
--- /dev/null
+++ b/spec/lib/gitlab/search/params_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Search::Params do
+ subject { described_class.new(params, detect_abuse: detect_abuse) }
+
+ let(:search) { 'search' }
+ let(:group_id) { 123 }
+ let(:params) { { group_id: 123, search: search } }
+ let(:detect_abuse) { true }
+
+ describe 'detect_abuse conditional' do
+ it 'does not call AbuseDetection' do
+ expect(Gitlab::Search::AbuseDetection).not_to receive(:new)
+ described_class.new(params, detect_abuse: false)
+ end
+
+ it 'uses AbuseDetection by default' do
+ expect(Gitlab::Search::AbuseDetection).to receive(:new).and_call_original
+ described_class.new(params)
+ end
+ end
+
+ describe '#[]' do
+ it 'feels like regular params' do
+ expect(subject[:group_id]).to eq(params[:group_id])
+ end
+
+ it 'has indifferent access' do
+ params = described_class.new({ 'search' => search, group_id: group_id })
+ expect(params['group_id']).to eq(group_id)
+ expect(params[:search]).to eq(search)
+ end
+
+ it 'also works on attr_reader attributes' do
+ expect(subject[:query_string]).to eq(subject.query_string)
+ end
+ end
+
+ describe '#query_string' do
+ let(:term) { 'term' }
+
+ it "uses 'search' parameter" do
+ params = described_class.new({ search: search })
+ expect(params.query_string).to eq(search)
+ end
+
+ it "uses 'term' parameter" do
+ params = described_class.new({ term: term })
+ expect(params.query_string).to eq(term)
+ end
+
+ it "prioritizes 'search' over 'term'" do
+ params = described_class.new({ search: search, term: term })
+ expect(params.query_string).to eq(search)
+ end
+
+ it 'strips surrounding whitespace from query string' do
+ params = described_class.new({ search: ' ' + search + ' ' })
+ expect(params.query_string).to eq(search)
+ end
+ end
+
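The precedence and whitespace handling pinned down above amount to a one-liner; a hedged sketch, not the real accessor:

    params = { term: '  gitlab  ' }
    query_string = (params[:search] || params[:term]).to_s.strip
    # => "gitlab"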
+ describe '#validate' do
+ context 'when detect_abuse is disabled' do
+ let(:detect_abuse) { false }
+
+ it 'does NOT validate AbuseDetector' do
+ expect(Gitlab::Search::AbuseDetection).not_to receive(:new)
+ subject.validate
+ end
+ end
+
+ it 'validates AbuseDetector on validation' do
+ expect(Gitlab::Search::AbuseDetection).to receive(:new).and_call_original
+ subject.validate
+ end
+ end
+
+ describe '#valid?' do
+ context 'when detect_abuse is disabled' do
+ let(:detect_abuse) { false }
+
+ it 'does NOT validate AbuseDetector' do
+ expect(Gitlab::Search::AbuseDetection).not_to receive(:new)
+ subject.valid?
+ end
+ end
+
+ it 'validates AbuseDetector on validation' do
+ expect(Gitlab::Search::AbuseDetection).to receive(:new).and_call_original
+ subject.valid?
+ end
+ end
+
+ describe 'abuse detection' do
+ let(:abuse_detection) { instance_double(Gitlab::Search::AbuseDetection) }
+
+ before do
+ allow(subject).to receive(:abuse_detection).and_return abuse_detection
+ allow(abuse_detection).to receive(:errors).and_return abuse_errors
+ end
+
+ context 'when there are abuse validation errors' do
+ let(:abuse_errors) { { foo: ['bar'] } }
+
+ it 'is considered abusive' do
+ expect(subject).to be_abusive
+ end
+ end
+
+ context 'when there are NOT any abuse validation errors' do
+ let(:abuse_errors) { {} }
+
+ context 'and there are other validation errors' do
+ it 'is NOT considered abusive' do
+ allow(subject).to receive(:valid?) do
+ subject.errors.add :project_id, 'validation error unrelated to abuse'
+ false
+ end
+
+ expect(subject).not_to be_abusive
+ end
+ end
+
+ context 'and there are NO other validation errors' do
+ it 'is NOT considered abusive' do
+ allow(subject).to receive(:valid?).and_return(true)
+
+ expect(subject).not_to be_abusive
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb
new file mode 100644
index 00000000000..0af029968e8
--- /dev/null
+++ b/spec/lib/gitlab/security/scan_configuration_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Security::ScanConfiguration do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:scan) { described_class.new(project: project, type: type, configured: configured) }
+
+ describe '#available?' do
+ subject { scan.available? }
+
+ let(:configured) { true }
+
+ context 'with a core scanner' do
+ let(:type) { :sast }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with custom scanner' do
+ let(:type) { :my_scanner }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#configured?' do
+ subject { scan.configured? }
+
+ let(:type) { :sast }
+ let(:configured) { false }
+
+ it { is_expected.to be_falsey }
+ end
+
+ describe '#configuration_path' do
+ subject { scan.configuration_path }
+
+ let(:configured) { true }
+
+ context 'with a non-configurable scanner' do
+ let(:type) { :secret_detection }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with licensed scanner for FOSS environment' do
+ let(:type) { :dast }
+
+ before do
+ stub_env('FOSS_ONLY', '1')
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with custom scanner' do
+ let(:type) { :my_scanner }
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_enq_spec.rb b/spec/lib/gitlab/sidekiq_enq_spec.rb
deleted file mode 100644
index 6903f01bf5f..00000000000
--- a/spec/lib/gitlab/sidekiq_enq_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::SidekiqEnq, :clean_gitlab_redis_queues do
- let(:retry_set) { Sidekiq::Scheduled::SETS.first }
- let(:schedule_set) { Sidekiq::Scheduled::SETS.last }
-
- around do |example|
- freeze_time { example.run }
- end
-
- shared_examples 'finds jobs that are due and enqueues them' do
- before do
- Sidekiq.redis do |redis|
- redis.zadd(retry_set, (Time.current - 1.day).to_f.to_s, '{"jid": 1}')
- redis.zadd(retry_set, Time.current.to_f.to_s, '{"jid": 2}')
- redis.zadd(retry_set, (Time.current + 1.day).to_f.to_s, '{"jid": 3}')
-
- redis.zadd(schedule_set, (Time.current - 1.day).to_f.to_s, '{"jid": 4}')
- redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 5}')
- redis.zadd(schedule_set, (Time.current + 1.day).to_f.to_s, '{"jid": 6}')
- end
- end
-
- it 'enqueues jobs that are due' do
- expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 1 })
- expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 2 })
- expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 4 })
- expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 5 })
-
- Gitlab::SidekiqEnq.new.enqueue_jobs
-
- Sidekiq.redis do |redis|
- expect(redis.zscan_each(retry_set).map(&:first)).to contain_exactly('{"jid": 3}')
- expect(redis.zscan_each(schedule_set).map(&:first)).to contain_exactly('{"jid": 6}')
- end
- end
- end
-
- context 'when atomic_sidekiq_scheduler is disabled' do
- before do
- stub_feature_flags(atomic_sidekiq_scheduler: false)
- end
-
- it_behaves_like 'finds jobs that are due and enqueues them'
-
- context 'when ZRANGEBYSCORE returns a job that is already removed by another process' do
- before do
- Sidekiq.redis do |redis|
- redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 1}')
-
- allow(redis).to receive(:zrangebyscore).and_wrap_original do |m, *args, **kwargs|
- m.call(*args, **kwargs).tap do |jobs|
- redis.zrem(schedule_set, jobs.first) if args[0] == schedule_set && jobs.first
- end
- end
- end
- end
-
- it 'calls ZREM but does not enqueue the job' do
- Sidekiq.redis do |redis|
- expect(redis).to receive(:zrem).with(schedule_set, '{"jid": 1}').twice.and_call_original
- end
- expect(Sidekiq::Client).not_to receive(:push)
-
- Gitlab::SidekiqEnq.new.enqueue_jobs
- end
- end
- end
-
- context 'when atomic_sidekiq_scheduler is enabled' do
- before do
- stub_feature_flags(atomic_sidekiq_scheduler: true)
- end
-
- context 'when Lua script is not yet loaded' do
- before do
- Gitlab::Redis::Queues.with { |redis| redis.script(:flush) }
- end
-
- it_behaves_like 'finds jobs that are due and enqueues them'
- end
-
- context 'when Lua script is already loaded' do
- before do
- Gitlab::SidekiqEnq.new.enqueue_jobs
- end
-
- it_behaves_like 'finds jobs that are due and enqueues them'
- end
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index d801b84775b..210b9162be0 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -272,12 +272,12 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expected_end_payload.merge(
'db_duration_s' => a_value >= 0.1,
'db_count' => a_value >= 1,
- "db_replica_#{db_config_name}_count" => 0,
+ "db_#{db_config_name}_replica_count" => 0,
'db_replica_duration_s' => a_value >= 0,
'db_primary_count' => a_value >= 1,
- "db_primary_#{db_config_name}_count" => a_value >= 1,
+ "db_#{db_config_name}_count" => a_value >= 1,
'db_primary_duration_s' => a_value > 0,
- "db_primary_#{db_config_name}_duration_s" => a_value > 0
+ "db_#{db_config_name}_duration_s" => a_value > 0
)
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 914f5a30c3a..3fbd207c2e1 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -239,6 +239,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
shared_context 'worker declaring data consistency' do
let(:worker_class) { LBTestWorker }
+ let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => 'AB/12345' } }
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => wal_locations } }
before do
stub_const('LBTestWorker', Class.new(TestWorker))
diff --git a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
index 0cf05fb0a5c..2f2499753b9 100644
--- a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
@@ -1,14 +1,25 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe Gitlab::SidekiqStatus::ClientMiddleware do
describe '#call' do
- it 'tracks the job in Redis' do
- expect(Gitlab::SidekiqStatus).to receive(:set).with('123', Gitlab::SidekiqStatus::DEFAULT_EXPIRATION)
+ context 'when the job has status_expiration set' do
+ it 'tracks the job in Redis with a value of 2' do
+ expect(Gitlab::SidekiqStatus).to receive(:set).with('123', 1.hour.to_i, value: 2)
- described_class.new
- .call('Foo', { 'jid' => '123' }, double(:queue), double(:pool)) { nil }
+ described_class.new
+ .call('Foo', { 'jid' => '123', 'status_expiration' => 1.hour.to_i }, double(:queue), double(:pool)) { nil }
+ end
+ end
+
+ context 'when the job does not have status_expiration set' do
+ it 'tracks the job in Redis with a value of 1' do
+ expect(Gitlab::SidekiqStatus).to receive(:set).with('123', Gitlab::SidekiqStatus::DEFAULT_EXPIRATION, value: 1)
+
+ described_class.new
+ .call('Foo', { 'jid' => '123' }, double(:queue), double(:pool)) { nil }
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index fc2ac29a1f9..1e7b52471b0 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -12,6 +12,31 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
Sidekiq.redis do |redis|
expect(redis.exists(key)).to eq(true)
expect(redis.ttl(key) > 0).to eq(true)
+ expect(redis.get(key)).to eq(described_class::DEFAULT_VALUE.to_s)
+ end
+ end
+
+ it 'allows overriding the expiration time' do
+ described_class.set('123', described_class::DEFAULT_EXPIRATION * 2)
+
+ key = described_class.key_for('123')
+
+ Sidekiq.redis do |redis|
+ expect(redis.exists(key)).to eq(true)
+ expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
+ expect(redis.get(key)).to eq(described_class::DEFAULT_VALUE.to_s)
+ end
+ end
+
+ it 'allows overriding the default value' do
+ described_class.set('123', value: 2)
+
+ key = described_class.key_for('123')
+
+ Sidekiq.redis do |redis|
+ expect(redis.exists(key)).to eq(true)
+ expect(redis.ttl(key) > 0).to eq(true)
+ expect(redis.get(key)).to eq('2')
end
end
end
@@ -88,7 +113,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
end
end
- describe 'completed' do
+ describe '.completed_jids' do
it 'returns the completed job' do
expect(described_class.completed_jids(%w(123))).to eq(['123'])
end
@@ -100,4 +125,46 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
end
end
+
+ describe '.job_status' do
+ it 'returns an array of boolean values' do
+ described_class.set('123')
+ described_class.set('456')
+ described_class.unset('123')
+
+ expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false])
+ end
+
+ it 'handles an empty array' do
+ expect(described_class.job_status([])).to eq([])
+ end
+
+ context 'when log_implicit_sidekiq_status_calls is enabled' do
+ it 'logs keys that contained the default value' do
+ described_class.set('123', value: 2)
+ described_class.set('456')
+ described_class.set('012')
+
+ expect(Sidekiq.logger).to receive(:info).with(message: described_class::DEFAULT_VALUE_MESSAGE,
+ keys: [described_class.key_for('456'), described_class.key_for('012')])
+
+ expect(described_class.job_status(%w(123 456 789 012))).to eq([true, true, false, true])
+ end
+ end
+
+ context 'when log_implicit_sidekiq_status_calls is disabled' do
+ before do
+ stub_feature_flags(log_implicit_sidekiq_status_calls: false)
+ end
+
+ it 'does not perform any logging' do
+ described_class.set('123', value: 2)
+ described_class.set('456')
+
+ expect(Sidekiq.logger).not_to receive(:info)
+
+ expect(described_class.job_status(%w(123 456 789))).to eq([true, true, false])
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
index e542ce455bb..a6e7665569c 100644
--- a/spec/lib/gitlab/spamcheck/client_spec.rb
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -32,6 +32,30 @@ RSpec.describe Gitlab::Spamcheck::Client do
stub_application_setting(spam_check_endpoint_url: endpoint)
end
+ describe 'url scheme' do
+ let(:stub) { double(:spamcheck_stub, check_for_spam_issue: response) }
+
+ context 'is tls' do
+ let(:endpoint) { 'tls://spamcheck.example.com' }
+
+ it 'uses secure connection' do
+ expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^tls://}, ''),
+ instance_of(GRPC::Core::ChannelCredentials),
+ anything).and_return(stub)
+ subject
+ end
+ end
+
+ context 'is grpc' do
+ it 'uses insecure connection' do
+ expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^grpc://}, ''),
+ :this_channel_is_insecure,
+ anything).and_return(stub)
+ subject
+ end
+ end
+ end
+
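A small sketch of the scheme handling these examples describe; the helper name is made up, and only the stripped host plus the credential choice (channel credentials for tls://, :this_channel_is_insecure otherwise) come from the expectations above:

    # Returns [target, credentials] for building the Spamcheck gRPC stub.
    def spamcheck_stub_args(endpoint_url)
      if endpoint_url.start_with?('tls://')
        # the real client would pass GRPC::Core::ChannelCredentials.new here
        [endpoint_url.delete_prefix('tls://'), :secure_channel_credentials]
      else
        [endpoint_url.delete_prefix('grpc://'), :this_channel_is_insecure]
      end
    end

    spamcheck_stub_args('tls://spamcheck.example.com')
    # => ["spamcheck.example.com", :secure_channel_credentials]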
describe '#issue_spam?' do
before do
allow_next_instance_of(::Spamcheck::SpamcheckService::Stub) do |instance|
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index 4be1c85f7c8..627d3bb42c7 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -56,6 +56,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
where(:method_name, :result) do
:default_subscriptions_url | 'https://customers.staging.gitlab.com'
:payment_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_validation'
+ :registration_validation_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_registration_validation'
:subscriptions_graphql_url | 'https://customers.staging.gitlab.com/graphql'
:subscriptions_more_minutes_url | 'https://customers.staging.gitlab.com/buy_pipeline_minutes'
:subscriptions_more_storage_url | 'https://customers.staging.gitlab.com/buy_storage'
@@ -63,6 +64,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
:subscriptions_plans_url | 'https://about.gitlab.com/pricing/'
:subscriptions_instance_review_url | 'https://customers.staging.gitlab.com/instance_review'
:subscriptions_gitlab_plans_url | 'https://customers.staging.gitlab.com/gitlab_plans'
+ :edit_account_url | 'https://customers.staging.gitlab.com/customers/edit'
end
with_them do
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
index 6004698d092..2b94eaa2db9 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
@@ -48,4 +48,24 @@ RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
end
end
end
+
+ describe '#options' do
+ let_it_be(:group) { create :group }
+
+ before do
+ stub_env('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091')
+ end
+
+ it 'includes protocol with the correct value' do
+ expect(subject.options(group)[:protocol]).to eq 'http'
+ end
+
+ it 'includes port with the correct value' do
+ expect(subject.options(group)[:port]).to eq 9091
+ end
+
+ it 'includes forceSecureTracker with value false' do
+ expect(subject.options(group)[:forceSecureTracker]).to eq false
+ end
+ end
end
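A hedged guess at how those option values fall out of SNOWPLOW_MICRO_URI; the parsing below is an assumption, not the actual SnowplowMicro implementation:

    require 'uri'

    uri = URI.parse(ENV.fetch('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091'))

    options = {
      protocol: uri.scheme,
      port: uri.port,
      forceSecureTracker: uri.scheme == 'https'
    }
    # => {:protocol=>"http", :port=>9091, :forceSecureTracker=>false}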
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
index f8e73a807c6..06cc2d3800c 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
expect(SnowplowTracker::Tracker)
.to receive(:new)
- .with(emitter, an_instance_of(SnowplowTracker::Subject), Gitlab::Tracking::SNOWPLOW_NAMESPACE, '_abc123_')
+ .with(emitter, an_instance_of(SnowplowTracker::Subject), described_class::SNOWPLOW_NAMESPACE, '_abc123_')
.and_return(tracker)
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 61b2c89ffa1..cd83971aef9 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -13,6 +13,10 @@ RSpec.describe Gitlab::Tracking do
described_class.instance_variable_set("@snowplow", nil)
end
+ after do
+ described_class.instance_variable_set("@snowplow", nil)
+ end
+
describe '.options' do
shared_examples 'delegates to destination' do |klass|
before do
@@ -63,7 +67,7 @@ RSpec.describe Gitlab::Tracking do
appId: '_abc123_',
protocol: 'http',
port: 9090,
- force_secure_tracker: false,
+ forceSecureTracker: false,
formTracking: true,
linkClickTracking: true
}
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
index 1ae4c9414dd..59b944ac398 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_1) { 'metric_1' }
let(:metric_2) { 'metric_2' }
let(:metric_names) { [metric_1, metric_2] }
+ let(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE }
describe 'metric calculations' do
before do
@@ -38,7 +39,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
end
it 'returns the number of unique events in the union of all metrics' do
- expect(calculate_metrics_union.round(2)).to eq(3.12)
+ expect(calculate_metrics_union.round(2)).to be_within(error_rate).percent_of(3)
end
context 'when there is no aggregated data saved' do
@@ -53,7 +54,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_names) { [metric_1] }
it 'returns the number of unique events for that metric' do
- expect(calculate_metrics_union.round(2)).to eq(2.08)
+ expect(calculate_metrics_union.round(2)).to be_within(error_rate).percent_of(2)
end
end
end
@@ -64,7 +65,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
end
it 'returns the number of common events in the intersection of all metrics' do
- expect(calculate_metrics_intersections.round(2)).to eq(1.04)
+ expect(calculate_metrics_intersections.round(2)).to be_within(error_rate).percent_of(1)
end
context 'when there is no aggregated data saved' do
@@ -79,7 +80,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_names) { [metric_1] }
it 'returns the number of common/unique events for the intersection of that metric' do
- expect(calculate_metrics_intersections.round(2)).to eq(2.08)
+ expect(calculate_metrics_intersections.round(2)).to be_within(error_rate).percent_of(2)
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb
new file mode 100644
index 00000000000..c9bc101374f
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowConfiguredToGitlabCollectorMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'for collector_hostname option' do
+ where(:collector_hostname, :expected_value) do
+ 'snowplow.trx.gitlab.net' | true
+ 'foo.bar.something.net' | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(snowplow_collector_hostname: collector_hostname)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb
new file mode 100644
index 00000000000..1e0cdd650fa
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowEnabledMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'for snowplow enabled option' do
+ where(:snowplow_enabled, :expected_value) do
+ true | true
+ false | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(snowplow_enabled: snowplow_enabled)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index dbbc718e147..0f95da74ff9 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -25,30 +25,10 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
end
context 'for count with default column metrics' do
- context 'with usage_data_instrumentation feature flag' do
- context 'when enabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: true)
- end
-
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with ::Gitlab::UsageDataMetrics.suggested_names
- let(:key_path) { 'counts.boards' }
- let(:name_suggestion) { /count_boards/ }
- end
- end
-
- context 'when disabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: false)
- end
-
- it_behaves_like 'name suggestion' do
- # corresponding metric is collected with count(Board)
- let(:key_path) { 'counts.boards' }
- let(:name_suggestion) { /count_boards/ }
- end
- end
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with count(Board)
+ let(:key_path) { 'counts.boards' }
+ let(:name_suggestion) { /count_boards/ }
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index cf544c07195..015ecd1671e 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -80,12 +80,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
end
-
- it 'allows indifferent access' do
- allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).and_return(1)
- expect(subject[:search_unique_visits][:search_unique_visits_for_any_target_monthly]).to eq(1)
- expect(subject[:search_unique_visits]['search_unique_visits_for_any_target_monthly']).to eq(1)
- end
end
describe 'usage_activity_by_stage_package' do
@@ -205,7 +199,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
for_defined_days_back do
user = create(:user)
user2 = create(:user)
- create(:event, author: user)
create(:group_member, user: user)
create(:authentication_event, user: user, provider: :ldapmain, result: :success)
create(:authentication_event, user: user2, provider: :ldapsecondary, result: :success)
@@ -214,17 +207,24 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:authentication_event, user: user, provider: :group_saml, result: :failed)
end
+ for_defined_days_back(days: [31, 29, 3]) do
+ create(:event)
+ end
+
+ stub_const('Gitlab::Database::PostgresHll::BatchDistinctCounter::DEFAULT_BATCH_SIZE', 1)
+ stub_const('Gitlab::Database::PostgresHll::BatchDistinctCounter::MIN_REQUIRED_BATCH_SIZE', 0)
+
expect(described_class.usage_activity_by_stage_manage({})).to include(
events: -1,
groups: 2,
- users_created: 6,
+ users_created: 10,
omniauth_providers: ['google_oauth2'],
user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 }
)
expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include(
- events: be_within(error_rate).percent_of(1),
+ events: be_within(error_rate).percent_of(2),
groups: 1,
- users_created: 3,
+ users_created: 6,
omniauth_providers: ['google_oauth2'],
user_auth_by_provider: { 'group_saml' => 1, 'ldap' => 2, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 }
)
@@ -457,42 +457,16 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
)
end
- context 'with usage_data_instrumentation feature flag' do
- context 'when enabled' do
- it 'merges the data from instrumentation classes' do
- stub_feature_flags(usage_data_instrumentation: true)
-
- for_defined_days_back do
- user = create(:user)
- project = create(:project, creator: user)
- create(:issue, project: project, author: user)
- create(:issue, project: project, author: User.support_bot)
- end
-
- expect(described_class.usage_activity_by_stage_plan({})).to include(issues: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK)
- expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK)
-
- uncached_data = described_class.uncached_data
- expect(uncached_data[:usage_activity_by_stage][:plan]).to include(issues: 3)
- expect(uncached_data[:usage_activity_by_stage_monthly][:plan]).to include(issues: 2)
- end
+ it 'does not merge the data from instrumentation classes' do
+ for_defined_days_back do
+ user = create(:user)
+ project = create(:project, creator: user)
+ create(:issue, project: project, author: user)
+ create(:issue, project: project, author: User.support_bot)
end
- context 'when disabled' do
- it 'does not merge the data from instrumentation classes' do
- stub_feature_flags(usage_data_instrumentation: false)
-
- for_defined_days_back do
- user = create(:user)
- project = create(:project, creator: user)
- create(:issue, project: project, author: user)
- create(:issue, project: project, author: User.support_bot)
- end
-
- expect(described_class.usage_activity_by_stage_plan({})).to include(issues: 3)
- expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: 2)
- end
- end
+ expect(described_class.usage_activity_by_stage_plan({})).to include(issues: 3)
+ expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: 2)
end
end
@@ -510,53 +484,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
deployments: 2,
failed_deployments: 2,
releases: 2,
- successful_deployments: 2
+ successful_deployments: 2,
+ releases_with_milestones: 2
)
expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include(
deployments: 1,
failed_deployments: 1,
releases: 1,
- successful_deployments: 1
+ successful_deployments: 1,
+ releases_with_milestones: 1
)
end
-
- context 'with usage_data_instrumentation feature flag' do
- before do
- for_defined_days_back do
- user = create(:user)
- create(:deployment, :failed, user: user)
- release = create(:release, author: user)
- create(:milestone, project: release.project, releases: [release])
- create(:deployment, :success, user: user)
- end
- end
-
- context 'when enabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: true)
- end
-
- it 'merges data from instrumentation classes' do
- expect(described_class.usage_activity_by_stage_release({})).to include(releases_with_milestones: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK)
- expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include(releases_with_milestones: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK)
-
- uncached_data = described_class.uncached_data
- expect(uncached_data[:usage_activity_by_stage][:release]).to include(releases_with_milestones: 2)
- expect(uncached_data[:usage_activity_by_stage_monthly][:release]).to include(releases_with_milestones: 1)
- end
- end
-
- context 'when disabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: false)
- end
-
- it 'does not merge data from instrumentation classes' do
- expect(described_class.usage_activity_by_stage_release({})).to include(releases_with_milestones: 2)
- expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include(releases_with_milestones: 1)
- end
- end
- end
end
describe 'usage_activity_by_stage_verify' do
@@ -605,16 +543,15 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.data }
it 'gathers usage data' do
- expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS.map(&:to_s))
+ expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS)
end
it 'gathers usage counts', :aggregate_failures do
count_data = subject[:counts]
expect(count_data[:boards]).to eq(1)
expect(count_data[:projects]).to eq(4)
- count_keys = UsageDataHelpers::COUNTS_KEYS.map(&:to_s)
- expect(count_data.keys).to include(*count_keys)
- expect(count_keys - count_data.keys).to be_empty
+ expect(count_data.keys).to include(*UsageDataHelpers::COUNTS_KEYS)
+ expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty
expect(count_data.values).to all(be_a_kind_of(Integer))
end
@@ -699,7 +636,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
external_diffs: { enabled: false },
lfs: { enabled: true, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
uploads: { enabled: nil, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
- packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } }.with_indifferent_access
+ packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } }
)
end
@@ -747,23 +684,50 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- it 'works when queries time out' do
- allow_any_instance_of(ActiveRecord::Relation)
- .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'when queries time out' do
+ let(:metric_method) { :count }
+
+ before do
+ allow_any_instance_of(ActiveRecord::Relation).to receive(metric_method).and_raise(ActiveRecord::StatementInvalid)
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
+ end
+
+ context 'with should_raise_for_dev? true' do
+ let(:should_raise_for_dev) { true }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
+ end
+
+ context 'when metric calls find_in_batches' do
+ let(:metric_method) { :find_in_batches }
+
+ it 'raises an error for jira_usage' do
+ expect { described_class.jira_usage }.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+ end
+
+ context 'with should_raise_for_dev? false' do
+ let(:should_raise_for_dev) { false }
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
- expect { subject }.not_to raise_error
+ context 'when metric calls find_in_batches' do
+ let(:metric_method) { :find_in_batches }
+
+ it 'does not raise an error for jira_usage' do
+ expect { described_class.jira_usage }.not_to raise_error
+ end
+ end
+ end
end
it 'includes a recording_ce_finished_at timestamp' do
expect(subject[:recording_ce_finished_at]).to be_a(Time)
end
-
- it 'jira usage works when queries time out' do
- allow_any_instance_of(ActiveRecord::Relation)
- .to receive(:find_in_batches).and_raise(ActiveRecord::StatementInvalid.new(''))
-
- expect { described_class.jira_usage }.not_to raise_error
- end
end
describe '.system_usage_data_monthly' do
@@ -873,37 +837,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.license_usage_data }
it 'gathers license data' do
+ expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
expect(subject[:version]).to eq(Gitlab::VERSION)
expect(subject[:installation_type]).to eq('gitlab-development-kit')
+ expect(subject[:active_user_count]).to eq(User.active.size)
expect(subject[:recorded_at]).to be_a(Time)
end
-
- context 'with usage_data_instrumentation feature flag' do
- context 'when enabled' do
- it 'merges uuid and hostname data from instrumentation classes' do
- stub_feature_flags(usage_data_instrumentation: true)
-
- expect(subject[:uuid]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK)
- expect(subject[:hostname]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK)
- expect(subject[:active_user_count]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK)
-
- uncached_data = described_class.data
- expect(uncached_data[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
- expect(uncached_data[:hostname]).to eq(Gitlab.config.gitlab.host)
- expect(uncached_data[:active_user_count]).to eq(User.active.size)
- end
- end
-
- context 'when disabled' do
- it 'does not merge uuid and hostname data from instrumentation classes' do
- stub_feature_flags(usage_data_instrumentation: false)
-
- expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
- expect(subject[:hostname]).to eq(Gitlab.config.gitlab.host)
- expect(subject[:active_user_count]).to eq(User.active.size)
- end
- end
- end
end
context 'when not relying on database records' do
@@ -1139,6 +1078,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe ".system_usage_data_settings" do
let(:prometheus_client) { double(Gitlab::PrometheusClient) }
+ let(:snowplow_gitlab_host?) { Gitlab::CurrentSettings.snowplow_collector_hostname == 'snowplow.trx.gitlab.net' }
before do
allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
@@ -1166,51 +1106,34 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:settings][:gitaly_apdex]).to be_within(0.001).of(0.95)
end
- context 'with usage_data_instrumentation feature flag' do
- context 'when enabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: true)
- end
-
- it 'reports collected data categories' do
- expected_value = %w[standard subscription operational optional]
+ it 'reports collected data categories' do
+ expected_value = %w[standard subscription operational optional]
- allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
- expect(instance).to receive(:execute).and_return(expected_value)
- end
-
- expect(described_class.data[:settings][:collected_data_categories]).to eq(expected_value)
- end
-
- it 'gathers service_ping_features_enabled' do
- expect(described_class.data[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled)
- end
+ allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
+ expect(instance).to receive(:execute).and_return(expected_value)
end
- context 'when disabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: false)
- end
-
- it 'reports collected data categories' do
- expected_value = %w[standard subscription operational optional]
-
- allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
- expect(instance).to receive(:execute).and_return(expected_value)
- end
-
- expect(subject[:settings][:collected_data_categories]).to eq(expected_value)
- end
+ expect(subject[:settings][:collected_data_categories]).to eq(expected_value)
+ end
- it 'gathers service_ping_features_enabled' do
- expect(subject[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled)
- end
- end
+ it 'gathers service_ping_features_enabled' do
+ expect(subject[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled)
end
it 'gathers user_cap_feature_enabled' do
expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap)
end
+
+ context 'snowplow stats' do
+ before do
+ stub_feature_flags(usage_data_instrumentation: false)
+ end
+
+ it 'gathers snowplow stats' do
+ expect(subject[:settings][:snowplow_enabled]).to eq(Gitlab::CurrentSettings.snowplow_enabled?)
+ expect(subject[:settings][:snowplow_configured_to_gitlab_collector]).to eq(snowplow_gitlab_host?)
+ end
+ end
end
end
@@ -1332,6 +1255,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
'i_analytics_cohorts' => 123,
'i_analytics_dev_ops_score' => 123,
'i_analytics_instance_statistics' => 123,
+ 'p_analytics_ci_cd_deployment_frequency' => 123,
+ 'p_analytics_ci_cd_lead_time' => 123,
+ 'p_analytics_ci_cd_pipelines' => 123,
'p_analytics_merge_request' => 123,
'i_analytics_dev_ops_adoption' => 123,
'users_viewing_analytics_group_devops_adoption' => 123,
@@ -1402,33 +1328,21 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
- context 'with redis_hll_tracking feature enabled' do
- it 'has all known_events' do
- stub_feature_flags(redis_hll_tracking: true)
-
- expect(subject).to have_key(:redis_hll_counters)
+ it 'has all known_events' do
+ expect(subject).to have_key(:redis_hll_counters)
- expect(subject[:redis_hll_counters].keys).to match_array(categories)
+ expect(subject[:redis_hll_counters].keys).to match_array(categories)
- categories.each do |category|
- keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
+ categories.each do |category|
+ keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
- metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
+ metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
- if ::Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_FOR_TOTALS.include?(category)
- metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
- end
-
- expect(subject[:redis_hll_counters][category].keys).to match_array(metrics)
+ if ::Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_FOR_TOTALS.include?(category)
+ metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
end
- end
- end
- context 'with redis_hll_tracking disabled' do
- it 'does not have redis_hll_tracking key' do
- stub_feature_flags(redis_hll_tracking: false)
-
- expect(subject).not_to have_key(:redis_hll_counters)
+ expect(subject[:redis_hll_counters][category].keys).to match_array(metrics)
end
end
end
@@ -1468,46 +1382,58 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
context 'when queries time out' do
before do
- allow_any_instance_of(ActiveRecord::Relation)
- .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ allow_any_instance_of(ActiveRecord::Relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid)
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
end
- it 'returns -1 for email campaign data' do
- expected_data = {
- "in_product_marketing_email_create_0_sent" => -1,
- "in_product_marketing_email_create_0_cta_clicked" => -1,
- "in_product_marketing_email_create_1_sent" => -1,
- "in_product_marketing_email_create_1_cta_clicked" => -1,
- "in_product_marketing_email_create_2_sent" => -1,
- "in_product_marketing_email_create_2_cta_clicked" => -1,
- "in_product_marketing_email_team_short_0_sent" => -1,
- "in_product_marketing_email_team_short_0_cta_clicked" => -1,
- "in_product_marketing_email_trial_short_0_sent" => -1,
- "in_product_marketing_email_trial_short_0_cta_clicked" => -1,
- "in_product_marketing_email_admin_verify_0_sent" => -1,
- "in_product_marketing_email_admin_verify_0_cta_clicked" => -1,
- "in_product_marketing_email_verify_0_sent" => -1,
- "in_product_marketing_email_verify_0_cta_clicked" => -1,
- "in_product_marketing_email_verify_1_sent" => -1,
- "in_product_marketing_email_verify_1_cta_clicked" => -1,
- "in_product_marketing_email_verify_2_sent" => -1,
- "in_product_marketing_email_verify_2_cta_clicked" => -1,
- "in_product_marketing_email_trial_0_sent" => -1,
- "in_product_marketing_email_trial_0_cta_clicked" => -1,
- "in_product_marketing_email_trial_1_sent" => -1,
- "in_product_marketing_email_trial_1_cta_clicked" => -1,
- "in_product_marketing_email_trial_2_sent" => -1,
- "in_product_marketing_email_trial_2_cta_clicked" => -1,
- "in_product_marketing_email_team_0_sent" => -1,
- "in_product_marketing_email_team_0_cta_clicked" => -1,
- "in_product_marketing_email_team_1_sent" => -1,
- "in_product_marketing_email_team_1_cta_clicked" => -1,
- "in_product_marketing_email_team_2_sent" => -1,
- "in_product_marketing_email_team_2_cta_clicked" => -1,
- "in_product_marketing_email_experience_0_sent" => -1
- }
+ context 'with should_raise_for_dev? true' do
+ let(:should_raise_for_dev) { true }
- expect(subject).to eq(expected_data)
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+
+ context 'with should_raise_for_dev? false' do
+ let(:should_raise_for_dev) { false }
+
+ it 'returns -1 for email campaign data' do
+ expected_data = {
+ "in_product_marketing_email_create_0_sent" => -1,
+ "in_product_marketing_email_create_0_cta_clicked" => -1,
+ "in_product_marketing_email_create_1_sent" => -1,
+ "in_product_marketing_email_create_1_cta_clicked" => -1,
+ "in_product_marketing_email_create_2_sent" => -1,
+ "in_product_marketing_email_create_2_cta_clicked" => -1,
+ "in_product_marketing_email_team_short_0_sent" => -1,
+ "in_product_marketing_email_team_short_0_cta_clicked" => -1,
+ "in_product_marketing_email_trial_short_0_sent" => -1,
+ "in_product_marketing_email_trial_short_0_cta_clicked" => -1,
+ "in_product_marketing_email_admin_verify_0_sent" => -1,
+ "in_product_marketing_email_admin_verify_0_cta_clicked" => -1,
+ "in_product_marketing_email_verify_0_sent" => -1,
+ "in_product_marketing_email_verify_0_cta_clicked" => -1,
+ "in_product_marketing_email_verify_1_sent" => -1,
+ "in_product_marketing_email_verify_1_cta_clicked" => -1,
+ "in_product_marketing_email_verify_2_sent" => -1,
+ "in_product_marketing_email_verify_2_cta_clicked" => -1,
+ "in_product_marketing_email_trial_0_sent" => -1,
+ "in_product_marketing_email_trial_0_cta_clicked" => -1,
+ "in_product_marketing_email_trial_1_sent" => -1,
+ "in_product_marketing_email_trial_1_cta_clicked" => -1,
+ "in_product_marketing_email_trial_2_sent" => -1,
+ "in_product_marketing_email_trial_2_cta_clicked" => -1,
+ "in_product_marketing_email_team_0_sent" => -1,
+ "in_product_marketing_email_team_0_cta_clicked" => -1,
+ "in_product_marketing_email_team_1_sent" => -1,
+ "in_product_marketing_email_team_1_cta_clicked" => -1,
+ "in_product_marketing_email_team_2_sent" => -1,
+ "in_product_marketing_email_team_2_cta_clicked" => -1,
+ "in_product_marketing_email_experience_0_sent" => -1
+ }
+
+ expect(subject).to eq(expected_data)
+ end
end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index e721b28ac29..325ace6fbbf 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -5,32 +5,38 @@ require 'spec_helper'
RSpec.describe Gitlab::Utils::UsageData do
include Database::DatabaseHelpers
- describe '#add_metric' do
- let(:metric) { 'UuidMetric'}
+ shared_examples 'failing hardening method' do
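+ # Callers of this shared example define `subject`, `fallback`, `failing_class` and `failing_method`;
+ # `should_raise_for_dev` is supplied by the contexts below.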
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
+ stub_const("Gitlab::Utils::UsageData::FALLBACK", fallback)
+ allow(failing_class).to receive(failing_method).and_raise(ActiveRecord::StatementInvalid)
+ end
- context 'with usage_data_instrumentation feature flag' do
- context 'when enabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: true)
- end
+ context 'with should_raise_for_dev? false' do
+ let(:should_raise_for_dev) { false }
- it 'returns -100 value to be overriden' do
- expect(described_class.add_metric(metric)).to eq(-100)
- end
+ it 'returns the fallback' do
+ expect(subject).to eq(fallback)
end
+ end
- context 'when disabled' do
- before do
- stub_feature_flags(usage_data_instrumentation: false)
- end
+ context 'with should_raise_for_dev? true' do
+ let(:should_raise_for_dev) { true }
- it 'computes the metric value for given metric' do
- expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid)
- end
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
end
end
end
+ describe '#add_metric' do
+ let(:metric) { 'UuidMetric' }
+
+ it 'computes the metric value for given metric' do
+ expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid)
+ end
+ end
+
describe '#count' do
let(:relation) { double(:relation) }
@@ -40,11 +46,14 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.count(relation, batch: false)).to eq(1)
end
- it 'returns the fallback value when counting fails' do
- stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
- allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'when counting fails' do
+ subject { described_class.count(relation, batch: false) }
- expect(described_class.count(relation, batch: false)).to eq(15)
+ let(:fallback) { 15 }
+ let(:failing_class) { relation }
+ let(:failing_method) { :count }
+
+ it_behaves_like 'failing hardening method'
end
end
@@ -57,11 +66,14 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.distinct_count(relation, batch: false)).to eq(1)
end
- it 'returns the fallback value when counting fails' do
- stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
- allow(relation).to receive(:distinct_count_by).and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'when counting fails' do
+ subject { described_class.distinct_count(relation, batch: false) }
+
+ let(:fallback) { 15 }
+ let(:failing_class) { relation }
+ let(:failing_method) { :distinct_count_by }
- expect(described_class.distinct_count(relation, batch: false)).to eq(15)
+ it_behaves_like 'failing hardening method'
end
end
@@ -106,7 +118,7 @@ RSpec.describe Gitlab::Utils::UsageData do
# build_needs set: ['1', '2', '3', '4', '5']
# ci_build set ['a', 'b']
# with them, current implementation is expected to consistently report
- # 5.217656147118495 and 2.0809220082170614 values
+ # the same static values
# This test suite is expected to assure, that HyperLogLog implementation
# behaves consistently between changes made to other parts of codebase.
# In case of fine tuning or changes to HyperLogLog algorithm implementation
@@ -118,8 +130,8 @@ RSpec.describe Gitlab::Utils::UsageData do
let(:model) { Ci::BuildNeed }
let(:column) { :name }
- let(:build_needs_estimated_cardinality) { 5.217656147118495 }
- let(:ci_builds_estimated_cardinality) { 2.0809220082170614 }
+ let(:build_needs_estimated_cardinality) { 5.024574181542231 }
+ let(:ci_builds_estimated_cardinality) { 2.003916452421793 }
before do
allow(model.connection).to receive(:transaction_open?).and_return(false)
@@ -173,14 +185,24 @@ RSpec.describe Gitlab::Utils::UsageData do
stub_const("Gitlab::Utils::UsageData::DISTRIBUTED_HLL_FALLBACK", 4)
end
- it 'returns fallback if counter raises WRONG_CONFIGURATION_ERROR' do
- expect(described_class.estimate_batch_distinct_count(relation, 'id', start: 1, finish: 0)).to eq 3
+ context 'when counter raises WRONG_CONFIGURATION_ERROR' do
+ subject { described_class.estimate_batch_distinct_count(relation, 'id', start: 1, finish: 0) }
+
+ let(:fallback) { 3 }
+ let(:failing_class) { Gitlab::Database::PostgresHll::BatchDistinctCounter }
+ let(:failing_method) { :new }
+
+ it_behaves_like 'failing hardening method'
end
- it 'returns default fallback value when counting fails due to database error' do
- allow(Gitlab::Database::PostgresHll::BatchDistinctCounter).to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'when counting fails due to database error' do
+ subject { described_class.estimate_batch_distinct_count(relation) }
+
+ let(:fallback) { 3 }
+ let(:failing_class) { Gitlab::Database::PostgresHll::BatchDistinctCounter }
+ let(:failing_method) { :new }
- expect(described_class.estimate_batch_distinct_count(relation)).to eq(3)
+ it_behaves_like 'failing hardening method'
end
it 'logs error and returns DISTRIBUTED_HLL_FALLBACK value when counting raises any error', :aggregate_failures do
@@ -205,13 +227,14 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.sum(relation, :column, batch_size: 100, start: 2, finish: 3)).to eq(1)
end
- it 'returns the fallback value when counting fails' do
- stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
- allow(Gitlab::Database::BatchCount)
- .to receive(:batch_sum)
- .and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'when counting fails' do
+ subject { described_class.sum(relation, :column) }
- expect(described_class.sum(relation, :column)).to eq(15)
+ let(:fallback) { 15 }
+ let(:failing_class) { Gitlab::Database::BatchCount }
+ let(:failing_method) { :batch_sum }
+
+ it_behaves_like 'failing hardening method'
end
end
@@ -291,23 +314,45 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(histogram).to eq('2' => 1)
end
- it 'returns fallback and logs canceled queries' do
- create(:alert_management_http_integration, :active, project: project1)
+ context 'when the query times out' do
+ subject do
+ with_statement_timeout(0.001) do
+ relation = AlertManagement::HttpIntegration.select('pg_sleep(0.002)')
+ described_class.histogram(relation, column, buckets: 1..100)
+ end
+ end
- expect(Gitlab::AppJsonLogger).to receive(:error).with(
- event: 'histogram',
- relation: relation.table_name,
- operation: 'histogram',
- operation_args: [column, 1, 100, 99],
- query: kind_of(String),
- message: /PG::QueryCanceled/
- )
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
+ create(:alert_management_http_integration, :active, project: project1)
+ end
- with_statement_timeout(0.001) do
- relation = AlertManagement::HttpIntegration.select('pg_sleep(0.002)')
- histogram = described_class.histogram(relation, column, buckets: 1..100)
+ context 'with should_raise_for_dev? false' do
+ let(:should_raise_for_dev) { false }
+
+ it 'logs canceled queries' do
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(
+ event: 'histogram',
+ relation: relation.table_name,
+ operation: 'histogram',
+ operation_args: [column, 1, 100, 99],
+ query: kind_of(String),
+ message: /PG::QueryCanceled/
+ )
+ subject
+ end
- expect(histogram).to eq(fallback)
+ it 'returns fallback' do
+ expect(subject).to eq(fallback)
+ end
+ end
+
+ context 'with should_raise_for_dev? true' do
+ let(:should_raise_for_dev) { true }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(ActiveRecord::QueryCanceled)
+ end
end
end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index f1601294c07..d756ec5ef83 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -249,10 +249,16 @@ RSpec.describe Gitlab::Utils do
end
describe '.which' do
- it 'finds the full path to an executable binary' do
- expect(File).to receive(:executable?).with('/bin/sh').and_return(true)
+ before do
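+ # PATH is searched in order: /sbin first, then /usr/bin; /home/joe/bin should never be reached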
+ stub_env('PATH', '/sbin:/usr/bin:/home/joe/bin')
+ end
+
+ it 'finds the full path to an executable binary in order of appearance' do
+ expect(File).to receive(:executable?).with('/sbin/tool').ordered.and_return(false)
+ expect(File).to receive(:executable?).with('/usr/bin/tool').ordered.and_return(true)
+ expect(File).not_to receive(:executable?).with('/home/joe/bin/tool')
- expect(which('sh', 'PATH' => '/bin')).to eq('/bin/sh')
+ expect(which('tool')).to eq('/usr/bin/tool')
end
end
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 3dd8f7c413e..3284c9cd0d1 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -209,4 +209,47 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
expect(subject.header).to eq({ 'User-Agent': 'GitLab/10.3 (GPN:GitLab;)' })
end
end
+
+ describe '#list_projects' do
+ subject { client.list_projects }
+
+ let(:list_of_projects) { [{}, {}, {}] }
+ let(:next_page_token) { nil }
+ let(:operation) { double('projects': list_of_projects, 'next_page_token': next_page_token) }
+
+ it 'calls Google Api CloudResourceManagerService#list_projects' do
+ expect_any_instance_of(Google::Apis::CloudresourcemanagerV1::CloudResourceManagerService)
+ .to receive(:list_projects)
+ .and_return(operation)
+ is_expected.to eq(list_of_projects)
+ end
+ end
+
+ describe '#create_service_account' do
+ subject { client.create_service_account(spy, spy, spy) }
+
+ let(:operation) { double('Service Account') }
+
+ it 'calls Google Api IamService#create_service_account' do
+ expect_any_instance_of(Google::Apis::IamV1::IamService)
+ .to receive(:create_service_account)
+ .with(any_args)
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
+
+ describe '#create_service_account_key' do
+ subject { client.create_service_account_key(spy, spy) }
+
+ let(:operation) { double('Service Account Key') }
+
+ it 'calls Google Api IamService#create_service_account_key' do
+ expect_any_instance_of(Google::Apis::IamV1::IamService)
+ .to receive(:create_service_account_key)
+ .with(any_args)
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
end
diff --git a/spec/lib/sidebars/concerns/link_with_html_options_spec.rb b/spec/lib/sidebars/concerns/link_with_html_options_spec.rb
new file mode 100644
index 00000000000..1e890bffad1
--- /dev/null
+++ b/spec/lib/sidebars/concerns/link_with_html_options_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Concerns::LinkWithHtmlOptions do
+ let(:options) { {} }
+
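+ # Exercise the concern through an anonymous class so no concrete menu implementation is required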
+ subject { Class.new { include Sidebars::Concerns::LinkWithHtmlOptions }.new }
+
+ before do
+ allow(subject).to receive(:container_html_options).and_return(options)
+ end
+
+ describe '#link_html_options' do
+ context 'with existing classes' do
+ let(:options) do
+ {
+ class: '_class1_ _class2_',
+ aria: { label: '_label_' }
+ }
+ end
+
+ it 'includes class and default aria-label attribute' do
+ result = {
+ class: '_class1_ _class2_ gl-link',
+ aria: { label: '_label_' }
+ }
+
+ expect(subject.link_html_options).to eq(result)
+ end
+ end
+
+ context 'without existing classes' do
+ it 'includes gl-link class' do
+ expect(subject.link_html_options).to eq(class: 'gl-link')
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
index e954d7a44ba..bc1fa3e88ff 100644
--- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
let_it_be(:owner) { create(:user) }
- let_it_be(:group) do
+ let_it_be_with_reload(:group) do
build(:group, :private).tap do |g|
g.add_owner(owner)
end
@@ -70,6 +70,18 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
describe 'Menu items' do
subject { find_menu(menu, item_id) }
+ shared_examples 'the menu entry is available' do
+ it 'the menu item is added to the list of menu items' do
+ is_expected.not_to be_nil
+ end
+ end
+
+ shared_examples 'the menu entry is not available' do
+ it 'the menu item is not added to the list of menu items' do
+ is_expected.to be_nil
+ end
+ end
+
describe 'Packages Registry' do
let(:item_id) { :packages_registry }
@@ -81,17 +93,13 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
context 'when config package setting is disabled' do
let(:packages_enabled) { false }
- it 'the menu item is not added to list of menu items' do
- is_expected.to be_nil
- end
+ it_behaves_like 'the menu entry is not available'
end
context 'when config package setting is enabled' do
let(:packages_enabled) { true }
- it 'the menu item is added to list of menu items' do
- is_expected.not_to be_nil
- end
+ it_behaves_like 'the menu entry is available'
end
end
end
@@ -107,24 +115,18 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
context 'when config registry setting is disabled' do
let(:container_enabled) { false }
- it 'the menu item is not added to list of menu items' do
- is_expected.to be_nil
- end
+ it_behaves_like 'the menu entry is not available'
end
context 'when config registry setting is enabled' do
let(:container_enabled) { true }
- it 'the menu item is added to list of menu items' do
- is_expected.not_to be_nil
- end
+ it_behaves_like 'the menu entry is available'
context 'when user cannot read container images' do
let(:user) { nil }
- it 'the menu item is not added to list of menu items' do
- is_expected.to be_nil
- end
+ it_behaves_like 'the menu entry is not available'
end
end
end
@@ -141,17 +143,28 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
context 'when config dependency_proxy is enabled' do
let(:dependency_enabled) { true }
- it 'the menu item is added to list of menu items' do
- is_expected.not_to be_nil
+ it_behaves_like 'the menu entry is available'
+
+ context 'when the group settings exist' do
+ let_it_be(:dependency_proxy_group_setting) { create(:dependency_proxy_group_setting, group: group) }
+
+ it_behaves_like 'the menu entry is available'
+
+ context 'when the proxy is disabled at the group level' do
+ before do
+ dependency_proxy_group_setting.enabled = false
+ dependency_proxy_group_setting.save!
+ end
+
+ it_behaves_like 'the menu entry is not available'
+ end
end
end
context 'when config dependency_proxy is not enabled' do
let(:dependency_enabled) { false }
- it 'the menu item is not added to list of menu items' do
- is_expected.to be_nil
- end
+ it_behaves_like 'the menu entry is not available'
end
end
@@ -159,9 +172,7 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
let(:user) { nil }
let(:dependency_enabled) { true }
- it 'the menu item is not added to list of menu items' do
- is_expected.to be_nil
- end
+ it_behaves_like 'the menu entry is not available'
end
end
end
diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb
index eb6a68f1afd..bdd9f22d5a0 100644
--- a/spec/lib/sidebars/menu_spec.rb
+++ b/spec/lib/sidebars/menu_spec.rb
@@ -153,6 +153,25 @@ RSpec.describe Sidebars::Menu do
end
end
+ describe '#remove_element' do
+ let(:item1) { Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}, item_id: :foo1) }
+ let(:item2) { Sidebars::MenuItem.new(title: 'foo2', link: 'foo2', active_routes: {}, item_id: :foo2) }
+ let(:item3) { Sidebars::MenuItem.new(title: 'foo3', link: 'foo3', active_routes: {}, item_id: :foo3) }
+ let(:list) { [item1, item2, item3] }
+
+ it 'removes specific element' do
+ menu.remove_element(list, :foo2)
+
+ expect(list).to eq [item1, item3]
+ end
+
+ it 'does not remove nil elements' do
+ menu.remove_element(list, nil)
+
+ expect(list).to eq [item1, item2, item3]
+ end
+ end
+
describe '#container_html_options' do
before do
allow(menu).to receive(:title).and_return('Foo Menu')
diff --git a/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb b/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb
new file mode 100644
index 00000000000..534267a329e
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::ShimoMenu do
+ let_it_be_with_reload(:project) { create(:project) }
+
+ let(:context) { Sidebars::Projects::Context.new(current_user: project.owner, container: project) }
+
+ subject(:shimo_menu) { described_class.new(context) }
+
+ describe '#render?' do
+ context 'without a valid Shimo integration' do
+ it "doesn't render the menu" do
+ expect(shimo_menu.render?).to be_falsey
+ end
+ end
+
+ context 'with a valid Shimo integration' do
+ let_it_be_with_reload(:shimo_integration) { create(:shimo_integration, project: project) }
+
+ context 'when integration is active' do
+ it 'renders the menu' do
+ expect(shimo_menu.render?).to eq true
+ end
+
+ it 'renders menu link' do
+ expected_url = Rails.application.routes.url_helpers.project_integrations_shimo_path(project)
+ expect(shimo_menu.link).to eq expected_url
+ end
+ end
+
+ context 'when integration is inactive' do
+ before do
+ shimo_integration.update!(active: false)
+ end
+
+ it "doesn't render the menu" do
+ expect(shimo_menu.render?).to eq false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/version_check_spec.rb b/spec/lib/version_check_spec.rb
index 23c381e241e..d7a772a3f7e 100644
--- a/spec/lib/version_check_spec.rb
+++ b/spec/lib/version_check_spec.rb
@@ -3,9 +3,67 @@
require 'spec_helper'
RSpec.describe VersionCheck do
+ describe '.image_url' do
+ it 'returns the correct URL' do
+ expect(described_class.image_url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.svg\?gitlab_info=\w+})
+ end
+ end
+
describe '.url' do
it 'returns the correct URL' do
- expect(described_class.url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.svg\?gitlab_info=\w+})
+ expect(described_class.url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.json\?gitlab_info=\w+})
+ end
+ end
+
+ describe '#calculate_reactive_cache' do
+ context 'response code is 200' do
+ before do
+ stub_request(:get, described_class.url).to_return(status: 200, body: '{ "status": "success" }', headers: {})
+ end
+
+ it 'returns the response object' do
+ expect(described_class.new.calculate_reactive_cache).to eq("{ \"status\": \"success\" }")
+ end
+ end
+
+ context 'response code is not 200' do
+ before do
+ stub_request(:get, described_class.url).to_return(status: 500, body: nil, headers: {})
+ end
+
+ it 'returns nil' do
+ expect(described_class.new.calculate_reactive_cache).to be(nil)
+ end
+ end
+ end
+
+ describe '#response' do
+ context 'cache returns value' do
+ let(:response) { { "severity" => "success" }.to_json }
+
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:with_reactive_cache).and_return(response)
+ end
+ end
+
+ it 'returns the response object' do
+ expect(described_class.new.response).to be(response)
+ end
+ end
+
+ context 'cache returns nil' do
+ let(:response) { nil }
+
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:with_reactive_cache).and_return(response)
+ end
+ end
+
+ it 'returns nil' do
+ expect(described_class.new.response).to be(nil)
+ end
end
end
end