gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r--  spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/app_logger_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/application_context_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/asciidoc_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/auth/auth_finders_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_project_namespace_details_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_project_namespace_on_issues_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb | 95
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb | 222
-rw-r--r--  spec/lib/gitlab/background_migration/batched_migration_job_spec.rb | 77
-rw-r--r--  spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb | 72
-rw-r--r--  spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb | 89
-rw-r--r--  spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/bitbucket_import/importer_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/cache/metrics_spec.rb | 118
-rw-r--r--  spec/lib/gitlab/checks/lfs_integrity_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/ci/config/entry/bridge_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/config/entry/processable_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/ci/config/entry/root_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/ci/config/entry/variable_spec.rb | 118
-rw-r--r--  spec/lib/gitlab/ci/config/entry/variables_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/base_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/config/external/mapper_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/ci/config_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/ci/parsers/security/common_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/command_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb | 136
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb | 119
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb | 224
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/ci/reports/sbom/component_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/ci/reports/sbom/report_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/ci/reports/security/flag_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/reports/security/reports_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/ci/reports/test_suite_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/ci/templates/MATLAB_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/katalon_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/npm_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/ci/variables/collection/item_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/variables/collection_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 77
-rw-r--r--  spec/lib/gitlab/cluster/lifecycle_events_spec.rb | 59
-rw-r--r--  spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/config_checker/external_database_checker_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/conflict/file_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/content_security_policy/config_loader_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/data_builder/build_spec.rb | 98
-rw-r--r--  spec/lib/gitlab/data_builder/pipeline_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_job_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb | 96
-rw-r--r--  spec/lib/gitlab/database/batch_count_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/load_balancing/configuration_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/load_balancing/service_discovery/sampler_spec.rb | 80
-rw-r--r--  spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/database/load_balancing_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/v2_spec.rb | 94
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 1276
-rw-r--r--  spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb | 95
-rw-r--r--  spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb | 679
-rw-r--r--  spec/lib/gitlab/database/migrations/extension_helpers_spec.rb | 65
-rw-r--r--  spec/lib/gitlab/database/migrations/lock_retries_helpers_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/database/migrations/runner_spec.rb | 63
-rw-r--r--  spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb | 91
-rw-r--r--  spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb | 170
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/postgres_partition_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/database/query_analyzer_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb | 121
-rw-r--r--  spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb (renamed from spec/lib/gitlab/database/query_analyzers/ci/partitioning_analyzer_spec.rb) | 12
-rw-r--r--  spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/database/tables_truncate_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/database/type/symbolized_jsonb_spec.rb | 64
-rw-r--r--  spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/diff/file_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/doorkeeper_secret_storing/token/pbkdf2_sha512_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/email/handler_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/email/receiver_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/error_tracking_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/experimentation/group_types_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/feature_categories_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/git/object_pool_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/git/tree_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git_ref_validator_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/gitaly_client/operation_service_spec.rb | 227
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb | 275
-rw-r--r--  spec/lib/gitlab/gitaly_client_spec.rb | 134
-rw-r--r--  spec/lib/gitlab/github_import/attachments_downloader_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/client_spec.rb | 118
-rw-r--r--  spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb | 149
-rw-r--r--  spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb | 83
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests/review_request_importer_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb | 141
-rw-r--r--  spec/lib/gitlab/github_import/representation/protected_branch_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/gon_helper_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/grape_logging/loggers/filter_parameters_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/health_checks/gitaly_check_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/hook_data/merge_request_builder_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 6
-rw-r--r--  spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/design_repo_restorer_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/import_export/fork_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/import_export/group/tree_saver_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/import_test_coverage_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/import_export/merge_request_parser_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/import_export/project/relation_saver_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/import_export/project/tree_restorer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb | 54
-rw-r--r--  spec/lib/gitlab/import_export/repo_restorer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 4
-rw-r--r--  spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/incoming_email_spec.rb | 99
-rw-r--r--  spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/json_logger_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/json_spec.rb | 56
-rw-r--r--  spec/lib/gitlab/kas_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/kroki_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/memory/watchdog/configuration_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/memory/watchdog/configurator_spec.rb | 199
-rw-r--r--  spec/lib/gitlab/memory/watchdog/monitor/rss_memory_limit_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/memory/watchdog_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/finder_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/url_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/global_search_slis_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb | 81
-rw-r--r--  spec/lib/gitlab/metrics/method_call_spec.rb | 47
-rw-r--r--  spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/metrics/system_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/observability_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/octokit/middleware_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/pagination_delegate_spec.rb | 157
-rw-r--r--  spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/project_template_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/qa_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/query_limiting/transaction_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/redis/multi_store_spec.rb | 544
-rw-r--r--  spec/lib/gitlab/request_forgery_protection_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/runtime_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/service_desk_email_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/sidekiq_config_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb | 312
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb | 322
-rw-r--r--  spec/lib/gitlab/slash_commands/application_help_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/slash_commands/command_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/slash_commands/presenters/incident_management/incident_new_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/sql/pattern_spec.rb | 45
-rw-r--r--  spec/lib/gitlab/tracking/helpers/weak_password_error_event_spec.rb | 45
-rw-r--r--  spec/lib/gitlab/url_builder_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/usage/metric_definition_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb | 434
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/count_merge_request_authors_metric_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb | 63
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_metric_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_period_setting_metric_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_setting_enabled_metric_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb | 55
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/where_constraints_spec.rb (renamed from spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb) | 7
-rw-r--r--  spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 196
-rw-r--r--  spec/lib/gitlab/utils/strong_memoize_spec.rb | 57
-rw-r--r--  spec/lib/gitlab/utils_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/webpack/file_loader_spec.rb | 4
227 files changed, 7706 insertions(+), 3714 deletions(-)
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
index b4aa843bcd7..258f4a0d019 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
@@ -38,6 +38,6 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
merge_request2.metrics.update!(merged_at: Time.zone.now)
end
- expect(subject).to be_within(0.5).of(7.5.minutes.seconds)
+ expect(subject).to be_within(5.seconds).of(7.5.minutes.seconds)
end
end
diff --git a/spec/lib/gitlab/app_logger_spec.rb b/spec/lib/gitlab/app_logger_spec.rb
index 23bac444dbe..85ca60d539f 100644
--- a/spec/lib/gitlab/app_logger_spec.rb
+++ b/spec/lib/gitlab/app_logger_spec.rb
@@ -5,10 +5,9 @@ require 'spec_helper'
RSpec.describe Gitlab::AppLogger do
subject { described_class }
- it 'builds a Gitlab::Logger object twice' do
- expect(Gitlab::Logger).to receive(:new)
- .exactly(described_class.loggers.size)
- .and_call_original
+ it 'builds two Logger instances' do
+ expect(Gitlab::Logger).to receive(:new).and_call_original
+ expect(Gitlab::JsonLogger).to receive(:new).and_call_original
subject.info('Hello World!')
end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 8b2a228b935..58d462aa27f 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::ApplicationContext do
describe '.push' do
it 'passes the expected context on to labkit' do
fake_proc = duck_type(:call)
- expected_context = { user: fake_proc, client_id: fake_proc }
+ expected_context = { user: fake_proc, user_id: fake_proc, client_id: fake_proc }
expect(Labkit::Context).to receive(:push).with(expected_context)
@@ -108,14 +108,16 @@ RSpec.describe Gitlab::ApplicationContext do
context = described_class.new(user: -> { user }, project: -> { project }, namespace: -> { subgroup })
expect(result(context))
- .to include(user: user.username, project: project.full_path, root_namespace: namespace.full_path)
+ .to include(user: user.username, user_id: user.id, project: project.full_path,
+ root_namespace: namespace.full_path)
end
it 'correctly loads the expected values when passed directly' do
context = described_class.new(user: user, project: project, namespace: subgroup)
expect(result(context))
- .to include(user: user.username, project: project.full_path, root_namespace: namespace.full_path)
+ .to include(user: user.username, user_id: user.id, project: project.full_path,
+ root_namespace: namespace.full_path)
end
it 'falls back to a projects namespace when a project is passed but no namespace' do
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 8fec8bce23e..d2eb9209f42 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -94,9 +94,18 @@ module Gitlab
# Move this test back to the items hash when removing `use_cmark_renderer` feature flag.
it "does not convert dangerous fenced code with inline script into HTML" do
input = '```mypre"><script>alert(3)</script>'
- output = "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" data-canonical-lang=\"mypre\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
+ output = <<~HTML
+ <div>
+ <div>
+ <div class=\"gl-relative markdown-code-block js-markdown-code\">
+ <pre lang=\"plaintext\" class=\"code highlight js-syntax-highlight language-plaintext\" data-canonical-lang=\"mypre\" v-pre=\"true\"><code></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </div>
+ </div>
+ HTML
- expect(render(input, context)).to include(output)
+ expect(render(input, context)).to include(output.strip)
end
it 'does not allow locked attributes to be overridden' do
@@ -360,7 +369,7 @@ module Gitlab
<div>
<div>
<div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" data-canonical-lang="js" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
+ <pre lang="javascript" class="code highlight js-syntax-highlight language-javascript" data-canonical-lang="js" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
<copy-code></copy-code>
</div>
</div>
@@ -390,7 +399,7 @@ module Gitlab
<div>class.cpp</div>
<div>
<div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" data-canonical-lang="c++" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include</span> <span class="cpf">&lt;stdio.h&gt;</span></span>
+ <pre lang="cpp" class="code highlight js-syntax-highlight language-cpp" data-canonical-lang="c++" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include</span> <span class="cpf">&lt;stdio.h&gt;</span></span>
<span id="LC2" class="line" lang="cpp"></span>
<span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
<span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
@@ -448,7 +457,7 @@ module Gitlab
stem:[2+2] is 4
MD
- expect(render(input, context)).to include('<pre data-math-style="display" class="code math js-render-math"><code>eta_x gamma</code></pre>')
+ expect(render(input, context)).to include('<pre data-math-style="display" lang="plaintext" class="code math js-render-math" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">eta_x gamma</span></code></pre>')
expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>')
end
end
@@ -567,7 +576,7 @@ module Gitlab
it 'does not allow kroki-plantuml-include to be overridden' do
input = <<~ADOC
- [plantuml, test="{counter:kroki-plantuml-include:/etc/passwd}", format="png"]
+ [plantuml, test="{counter:kroki-plantuml-include:README.md}", format="png"]
....
class BlockProcessor
@@ -578,7 +587,7 @@ module Gitlab
output = <<~HTML
<div>
<div>
- <a class=\"no-attachment-icon\" href=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"Diagram\" decoding=\"async\" class=\"lazy\" data-src=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\"></a>
+ <a class=\"no-attachment-icon\" href=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==?test=README.md\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"Diagram\" decoding=\"async\" class=\"lazy\" data-src=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==?test=README.md\"></a>
</div>
</div>
HTML
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index e2226952d15..05eca4cf70f 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -89,12 +89,13 @@ RSpec.describe Gitlab::Auth::AuthFinders do
context 'with a running job' do
let(:token) { job.token }
- if without_job_token_allowed == :error
+ case without_job_token_allowed
+ when :error
it 'returns an Unauthorized exception' do
expect { subject }.to raise_error(Gitlab::Auth::UnauthorizedError)
expect(@current_authenticated_job).to be_nil
end
- elsif without_job_token_allowed == :user
+ when :user
it 'returns the user' do
expect(subject).to eq(user)
expect(@current_authenticated_job).to eq job
diff --git a/spec/lib/gitlab/background_migration/backfill_project_namespace_details_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_namespace_details_spec.rb
new file mode 100644
index 00000000000..77d6cc43114
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_namespace_details_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectNamespaceDetails, :migration do
+ let_it_be(:namespace_details) { table(:namespace_details) }
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: projects.minimum(:id),
+ end_id: projects.maximum(:id),
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ describe '#perform' do
+ it 'creates details for all project namespaces in range' do
+ namespaces.create!(id: 5, name: 'test1', path: 'test1', description: "Some description1",
+ description_html: "Some description html1", cached_markdown_version: 4)
+ project_namespace1 = namespaces.create!(id: 6, name: 'test2', path: 'test2', type: 'Project')
+ namespaces.create!(id: 7, name: 'test3', path: 'test3', description: "Some description3",
+ description_html: "Some description html3", cached_markdown_version: 4)
+ project_namespace2 = namespaces.create!(id: 8, name: 'test4', path: 'test4', type: 'Project')
+
+ project1 = projects.create!(namespace_id: project_namespace1.id, name: 'gitlab1', path: 'gitlab1',
+ project_namespace_id: project_namespace1.id, description: "Some description2",
+ description_html: "Some description html2", cached_markdown_version: 4)
+ project2 = projects.create!(namespace_id: project_namespace2.id, name: 'gitlab2', path: 'gitlab2',
+ project_namespace_id: project_namespace2.id,
+ description: "Some description3",
+ description_html: "Some description html4", cached_markdown_version: 4)
+
+ namespace_details.delete_all
+
+ expect(namespace_details.pluck(:namespace_id)).to eql []
+
+ expect { perform_migration }
+ .to change { namespace_details.pluck(:namespace_id) }.from([]).to contain_exactly(
+ project_namespace1.id,
+ project_namespace2.id
+ )
+
+ expect(namespace_details.find_by_namespace_id(project_namespace1.id))
+ .to have_attributes(migrated_attributes(project1))
+ expect(namespace_details.find_by_namespace_id(project_namespace2.id))
+ .to have_attributes(migrated_attributes(project2))
+ end
+ end
+
+ def migrated_attributes(project)
+ {
+ description: project.description,
+ description_html: project.description_html,
+ cached_markdown_version: project.cached_markdown_version
+ }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_namespace_on_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_namespace_on_issues_spec.rb
index 29833074109..3ca7d28f09d 100644
--- a/spec/lib/gitlab/background_migration/backfill_project_namespace_on_issues_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_project_namespace_on_issues_spec.rb
@@ -54,4 +54,21 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillProjectNamespaceOnIssues do
expect { perform_migration }.to change { migration.batch_metrics.timings }
end
+
+ context 'when database timeouts' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(error_class: [ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled])
+
+ with_them do
+ it 'retries on timeout error' do
+ expect(migration).to receive(:update_batch).exactly(3).times.and_raise(error_class)
+ expect(migration).to receive(:sleep).with(5).twice
+
+ expect do
+ perform_migration
+ end.to raise_error(error_class)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb b/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb
deleted file mode 100644
index 4a65ecf8c75..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb
+++ /dev/null
@@ -1,95 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectsWithCoverage,
- :suppress_gitlab_schemas_validate_connection, schema: 20210818185845 do
- let(:projects) { table(:projects) }
- let(:project_ci_feature_usages) { table(:project_ci_feature_usages) }
- let(:ci_pipelines) { table(:ci_pipelines) }
- let(:ci_daily_build_group_report_results) { table(:ci_daily_build_group_report_results) }
- let(:group) { table(:namespaces).create!(name: 'user', path: 'user') }
- let(:project_1) { projects.create!(namespace_id: group.id) }
- let(:project_2) { projects.create!(namespace_id: group.id) }
- let(:pipeline_1) { ci_pipelines.create!(project_id: project_1.id, source: 13) }
- let(:pipeline_2) { ci_pipelines.create!(project_id: project_1.id, source: 13) }
- let(:pipeline_3) { ci_pipelines.create!(project_id: project_2.id, source: 13) }
- let(:pipeline_4) { ci_pipelines.create!(project_id: project_2.id, source: 13) }
-
- subject { described_class.new }
-
- describe '#perform' do
- before do
- ci_daily_build_group_report_results.create!(
- id: 1,
- project_id: project_1.id,
- date: 4.days.ago,
- last_pipeline_id: pipeline_1.id,
- ref_path: 'main',
- group_name: 'rspec',
- data: { coverage: 95.0 },
- default_branch: true,
- group_id: group.id
- )
-
- ci_daily_build_group_report_results.create!(
- id: 2,
- project_id: project_1.id,
- date: 3.days.ago,
- last_pipeline_id: pipeline_2.id,
- ref_path: 'main',
- group_name: 'rspec',
- data: { coverage: 95.0 },
- default_branch: true,
- group_id: group.id
- )
-
- ci_daily_build_group_report_results.create!(
- id: 3,
- project_id: project_2.id,
- date: 2.days.ago,
- last_pipeline_id: pipeline_3.id,
- ref_path: 'main',
- group_name: 'rspec',
- data: { coverage: 95.0 },
- default_branch: true,
- group_id: group.id
- )
-
- ci_daily_build_group_report_results.create!(
- id: 4,
- project_id: project_2.id,
- date: 1.day.ago,
- last_pipeline_id: pipeline_4.id,
- ref_path: 'test_branch',
- group_name: 'rspec',
- data: { coverage: 95.0 },
- default_branch: false,
- group_id: group.id
- )
-
- stub_const("#{described_class}::INSERT_DELAY_SECONDS", 0)
- end
-
- it 'creates entries per project and default_branch combination in the given range', :aggregate_failures do
- subject.perform(1, 4, 2)
-
- entries = project_ci_feature_usages.order('project_id ASC, default_branch DESC')
-
- expect(entries.count).to eq(3)
- expect(entries[0]).to have_attributes(project_id: project_1.id, feature: 1, default_branch: true)
- expect(entries[1]).to have_attributes(project_id: project_2.id, feature: 1, default_branch: true)
- expect(entries[2]).to have_attributes(project_id: project_2.id, feature: 1, default_branch: false)
- end
-
- context 'when an entry for the project and default branch combination already exists' do
- before do
- subject.perform(1, 4, 2)
- end
-
- it 'does not create a new entry' do
- expect { subject.perform(1, 4, 2) }.not_to change { project_ci_feature_usages.count }
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb b/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
new file mode 100644
index 00000000000..04ada1703bc
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
@@ -0,0 +1,222 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillUserDetailsFields, :migration, schema: 20221018232820 do
+ let(:users) { table(:users) }
+ let(:user_details) { table(:user_details) }
+
+ let!(:user_all_fields_backfill) do
+ users.create!(
+ name: generate(:name),
+ email: generate(:email),
+ projects_limit: 1,
+ linkedin: 'linked-in',
+ twitter: '@twitter',
+ skype: 'skype',
+ website_url: 'https://example.com',
+ location: 'Antarctica',
+ organization: 'Gitlab'
+ )
+ end
+
+ let!(:user_long_details_fields) do
+ length = UserDetail::DEFAULT_FIELD_LENGTH + 1
+ users.create!(
+ name: generate(:name),
+ email: generate(:email),
+ projects_limit: 1,
+ linkedin: 'l' * length,
+ twitter: 't' * length,
+ skype: 's' * length,
+ website_url: "https://#{'a' * (length - 12)}.com",
+ location: 'l' * length,
+ organization: 'o' * length
+ )
+ end
+
+ let!(:user_nil_details_fields) do
+ users.create!(
+ name: generate(:name),
+ email: generate(:email),
+ projects_limit: 1
+ )
+ end
+
+ let!(:user_empty_details_fields) do
+ users.create!(
+ name: generate(:name),
+ email: generate(:email),
+ projects_limit: 1,
+ linkedin: '',
+ twitter: '',
+ skype: '',
+ website_url: '',
+ location: '',
+ organization: ''
+ )
+ end
+
+ let!(:user_with_bio) do
+ users.create!(
+ name: generate(:name),
+ email: generate(:email),
+ projects_limit: 1,
+ linkedin: 'linked-in',
+ twitter: '@twitter',
+ skype: 'skype',
+ website_url: 'https://example.com',
+ location: 'Antarctica',
+ organization: 'Gitlab'
+ )
+ end
+
+ let!(:bio_user_details) do
+ user_details
+ .find_or_create_by!(user_id: user_with_bio.id)
+ .update!(bio: 'bio')
+ end
+
+ let!(:user_with_details) do
+ users.create!(
+ name: generate(:name),
+ email: generate(:email),
+ projects_limit: 1,
+ linkedin: 'linked-in',
+ twitter: '@twitter',
+ skype: 'skype',
+ website_url: 'https://example.com',
+ location: 'Antarctica',
+ organization: 'Gitlab'
+ )
+ end
+
+ let!(:existing_user_details) do
+ user_details
+ .find_or_create_by!(user_id: user_with_details.id)
+ .update!(
+ linkedin: 'linked-in',
+ twitter: '@twitter',
+ skype: 'skype',
+ website_url: 'https://example.com',
+ location: 'Antarctica',
+ organization: 'Gitlab'
+ )
+ end
+
+ let!(:user_different_details) do
+ users.create!(
+ name: generate(:name),
+ email: generate(:email),
+ projects_limit: 1,
+ linkedin: 'linked-in',
+ twitter: '@twitter',
+ skype: 'skype',
+ website_url: 'https://example.com',
+ location: 'Antarctica',
+ organization: 'Gitlab'
+ )
+ end
+
+ let!(:differing_details) do
+ user_details
+ .find_or_create_by!(user_id: user_different_details.id)
+ .update!(
+ linkedin: 'details-in',
+ twitter: '@details',
+ skype: 'details_skype',
+ website_url: 'https://details.site',
+ location: 'Details Location',
+ organization: 'Details Organization'
+ )
+ end
+
+ let(:user_ids) do
+ [
+ user_all_fields_backfill,
+ user_long_details_fields,
+ user_nil_details_fields,
+ user_empty_details_fields,
+ user_with_bio,
+ user_with_details,
+ user_different_details
+ ].map(&:id)
+ end
+
+ subject do
+ described_class.new(
+ start_id: user_ids.min,
+ end_id: user_ids.max,
+ batch_table: 'users',
+ batch_column: 'id',
+ sub_batch_size: 1_000,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ it 'processes all relevant records' do
+ expect { subject.perform }.to change { user_details.all.size }.to(5)
+ end
+
+ it 'backfills new user_details fields' do
+ subject.perform
+
+ user_detail = user_details.find_by!(user_id: user_all_fields_backfill.id)
+ expect(user_detail.linkedin).to eq('linked-in')
+ expect(user_detail.twitter).to eq('@twitter')
+ expect(user_detail.skype).to eq('skype')
+ expect(user_detail.website_url).to eq('https://example.com')
+ expect(user_detail.location).to eq('Antarctica')
+ expect(user_detail.organization).to eq('Gitlab')
+ end
+
+ it 'does not migrate nil fields' do
+ subject.perform
+
+ expect(user_details.find_by(user_id: user_nil_details_fields)).to be_nil
+ end
+
+ it 'does not migrate empty fields' do
+ subject.perform
+
+ expect(user_details.find_by(user_id: user_empty_details_fields)).to be_nil
+ end
+
+ it 'backfills new fields without overwriting existing `bio` field' do
+ subject.perform
+
+ user_detail = user_details.find_by!(user_id: user_with_bio.id)
+ expect(user_detail.bio).to eq('bio')
+ expect(user_detail.linkedin).to eq('linked-in')
+ expect(user_detail.twitter).to eq('@twitter')
+ expect(user_detail.skype).to eq('skype')
+ expect(user_detail.website_url).to eq('https://example.com')
+ expect(user_detail.location).to eq('Antarctica')
+ expect(user_detail.organization).to eq('Gitlab')
+ end
+
+ context 'when user details are unchanged' do
+ it 'does not change existing details' do
+ expect { subject.perform }.not_to change {
+ user_details.find_by!(user_id: user_with_details.id).attributes
+ }
+ end
+ end
+
+ context 'when user details are changed' do
+ it 'updates existing user details' do
+ expect { subject.perform }.to change {
+ user_details.find_by!(user_id: user_different_details.id).attributes
+ }
+
+ user_detail = user_details.find_by!(user_id: user_different_details.id)
+ expect(user_detail.linkedin).to eq('linked-in')
+ expect(user_detail.twitter).to eq('@twitter')
+ expect(user_detail.skype).to eq('skype')
+ expect(user_detail.website_url).to eq('https://example.com')
+ expect(user_detail.location).to eq('Antarctica')
+ expect(user_detail.organization).to eq('Gitlab')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
index f03f90ddbbb..95be14cefb1 100644
--- a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
@@ -57,6 +57,71 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
end
end
+ describe '.operation_name' do
+ subject(:perform_job) { job_instance.perform }
+
+ let(:job_instance) do
+ job_class.new(start_id: 1, end_id: 10,
+ batch_table: '_test_table',
+ batch_column: 'id',
+ sub_batch_size: 2,
+ pause_ms: 1000,
+ job_arguments: %w(a b),
+ connection: connection)
+ end
+
+ let(:job_class) do
+ Class.new(described_class) do
+ operation_name :update_all
+ end
+ end
+
+ it 'defines method' do
+ expect(job_instance.operation_name).to eq(:update_all)
+ end
+
+ context 'when `operation_name` is not defined' do
+ let(:job_class) do
+ Class.new(described_class) do
+ def perform
+ each_sub_batch do |sub_batch|
+ sub_batch.update_all('to_column = from_column')
+ end
+ end
+ end
+ end
+
+ let(:test_table) { table(:_test_table) }
+ let(:test_insert_table) { table(:_test_insert_table) }
+
+ before do
+ allow(job_instance).to receive(:sleep)
+
+ connection.create_table :_test_table do |t|
+ t.timestamps_with_timezone null: false
+ t.integer :from_column, null: false
+ end
+
+ connection.create_table :_test_insert_table, id: false do |t|
+ t.integer :to_column
+ t.index :to_column, unique: true
+ end
+
+ test_table.create!(id: 1, from_column: 5)
+ test_table.create!(id: 2, from_column: 10)
+ end
+
+ after do
+ connection.drop_table(:_test_table)
+ connection.drop_table(:_test_insert_table)
+ end
+
+ it 'raises an exception' do
+ expect { perform_job }.to raise_error(RuntimeError, /Operation name is required/)
+ end
+ end
+ end
+
describe '.scope_to' do
subject(:job_instance) do
job_class.new(start_id: 1, end_id: 10,
@@ -133,9 +198,10 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
context 'when the subclass uses sub-batching' do
let(:job_class) do
Class.new(described_class) do
+ operation_name :update
+
def perform(*job_arguments)
each_sub_batch(
- operation_name: :update,
batching_arguments: { order_hint: :updated_at },
batching_scope: -> (relation) { relation.where.not(bar: nil) }
) do |sub_batch|
@@ -177,10 +243,10 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
let(:job_class) do
Class.new(described_class) do
scope_to ->(r) { r.where('mod(id, 2) = 0') }
+ operation_name :update
def perform(*job_arguments)
each_sub_batch(
- operation_name: :update,
batching_arguments: { order_hint: :updated_at },
batching_scope: -> (relation) { relation.where.not(bar: nil) }
) do |sub_batch|
@@ -237,8 +303,10 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
let(:job_class) do
Class.new(described_class) do
+ operation_name :insert
+
def perform(*job_arguments)
- distinct_each_batch(operation_name: :insert) do |sub_batch|
+ distinct_each_batch do |sub_batch|
sub_batch.pluck(:from_column).each do |value|
connection.execute("INSERT INTO _test_insert_table VALUES (#{value})")
end
@@ -291,9 +359,10 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
let(:job_class) do
Class.new(described_class) do
scope_to ->(r) { r.where.not(from_column: 10) }
+ operation_name :insert
def perform(*job_arguments)
- distinct_each_batch(operation_name: :insert) do |sub_batch|
+ distinct_each_batch do |sub_batch|
end
end
end
diff --git a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
index 264faa4de3b..c522c8b307f 100644
--- a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
+++ b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
@@ -241,7 +241,7 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover, :aggregate_failur
context 'when legacy uploads are stored in object storage' do
let(:legacy_upload) { create_remote_upload(note, filename) }
let(:remote_file) do
- { key: "#{legacy_upload.path}" }
+ { key: legacy_upload.path.to_s }
end
let(:connection) { ::Fog::Storage.new(FileUploader.object_store_credentials) }
diff --git a/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb b/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
new file mode 100644
index 00000000000..74f674e052d
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_projects_star_count_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateProjectsStarCount, schema: 20221019105041 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:users_star_projects) { table(:users_star_projects) }
+
+ let(:namespace1) { namespaces.create!(name: 'namespace 1', path: 'namespace1') }
+ let(:namespace2) { namespaces.create!(name: 'namespace 2', path: 'namespace2') }
+ let(:namespace3) { namespaces.create!(name: 'namespace 3', path: 'namespace3') }
+ let(:namespace4) { namespaces.create!(name: 'namespace 4', path: 'namespace4') }
+ let(:namespace5) { namespaces.create!(name: 'namespace 5', path: 'namespace5') }
+
+ let(:project1) { projects.create!(namespace_id: namespace1.id, project_namespace_id: namespace1.id) }
+ let(:project2) { projects.create!(namespace_id: namespace2.id, project_namespace_id: namespace2.id) }
+ let(:project3) { projects.create!(namespace_id: namespace3.id, project_namespace_id: namespace3.id) }
+ let(:project4) { projects.create!(namespace_id: namespace4.id, project_namespace_id: namespace4.id) }
+ let(:project5) { projects.create!(namespace_id: namespace5.id, project_namespace_id: namespace5.id) }
+
+ let(:user_active) { users.create!(state: 'active', email: 'test1@example.com', projects_limit: 5) }
+ let(:user_blocked) { users.create!(state: 'blocked', email: 'test2@example.com', projects_limit: 5) }
+
+ let(:migration) do
+ described_class.new(
+ start_id: project1.id,
+ end_id: project4.id,
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 2,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ subject(:perform_migration) { migration.perform }
+
+ it 'correctly populates the star counters' do
+ users_star_projects.create!(project_id: project1.id, user_id: user_active.id)
+ users_star_projects.create!(project_id: project2.id, user_id: user_blocked.id)
+ users_star_projects.create!(project_id: project4.id, user_id: user_active.id)
+ users_star_projects.create!(project_id: project4.id, user_id: user_blocked.id)
+ users_star_projects.create!(project_id: project5.id, user_id: user_active.id)
+
+ perform_migration
+
+ expect(project1.reload.star_count).to eq(1)
+ expect(project2.reload.star_count).to eq(0)
+ expect(project3.reload.star_count).to eq(0)
+ expect(project4.reload.star_count).to eq(1)
+ expect(project5.reload.star_count).to eq(0)
+ end
+
+ context 'when database timeouts' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(error_class: [ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled])
+
+ with_them do
+ it 'retries on timeout error' do
+ expect(migration).to receive(:update_batch).exactly(3).times.and_raise(error_class)
+ expect(migration).to receive(:sleep).with(5).twice
+
+ expect do
+ perform_migration
+ end.to raise_error(error_class)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb
index 3de84a4e880..fc06012ed20 100644
--- a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityReads, :migrati
project_id: project.id,
external_type: 'uuid-v5',
external_id: 'uuid-v5',
- fingerprint: Digest::SHA1.hexdigest("#{vulnerability.id}"),
+ fingerprint: Digest::SHA1.hexdigest(vulnerability.id.to_s),
name: 'Identifier for UUIDv5')
create_finding!(
diff --git a/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb b/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb
index 41266cb24da..10597e65910 100644
--- a/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_backfilled_job_artifacts_expire_at_spec.rb
@@ -85,8 +85,9 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveBackfilledJobArtifactsExpireAt
private
def create_job_artifact(id:, file_type:, expire_at:)
- job = table(:ci_builds, database: :ci).create!(id: id)
- job_artifact.create!(id: id, job_id: job.id, expire_at: expire_at, project_id: project.id, file_type: file_type)
+ job = table(:ci_builds, database: :ci).create!(id: id, partition_id: 100)
+ job_artifact.create!(id: id, job_id: job.id, expire_at: expire_at, project_id: project.id,
+ file_type: file_type, partition_id: 100)
end
end
end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb
deleted file mode 100644
index b6da8f7fc2d..00000000000
--- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValues,
- :migration,
- schema: 20220922143634 do
- it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchedMigrationJob }
-
- describe '#perform' do
- let(:ci_runners) { table(:ci_runners, database: :ci) }
-
- let(:test_worker) do
- described_class.new(
- start_id: 1,
- end_id: 4,
- batch_table: :ci_runners,
- batch_column: :id,
- sub_batch_size: 2,
- pause_ms: 0,
- connection: Ci::ApplicationRecord.connection
- )
- end
-
- subject(:perform) { test_worker.perform }
-
- before do
- ci_runners.create!(id: 1, runner_type: 1, token_encrypted: 'duplicate')
- ci_runners.create!(id: 2, runner_type: 1, token_encrypted: 'a-token')
- ci_runners.create!(id: 3, runner_type: 1, token_encrypted: 'duplicate-2')
- ci_runners.create!(id: 4, runner_type: 1, token_encrypted: nil)
- ci_runners.create!(id: 5, runner_type: 1, token_encrypted: 'duplicate-2')
- ci_runners.create!(id: 6, runner_type: 1, token_encrypted: 'duplicate')
- ci_runners.create!(id: 7, runner_type: 1, token_encrypted: 'another-token')
- ci_runners.create!(id: 8, runner_type: 1, token_encrypted: 'another-token')
- end
-
- it 'nullifies duplicate encrypted tokens', :aggregate_failures do
- expect { perform }.to change { ci_runners.all.order(:id).pluck(:id, :token_encrypted).to_h }
- .from(
- {
- 1 => 'duplicate',
- 2 => 'a-token',
- 3 => 'duplicate-2',
- 4 => nil,
- 5 => 'duplicate-2',
- 6 => 'duplicate',
- 7 => 'another-token',
- 8 => 'another-token'
- }
- )
- .to(
- {
- 1 => nil,
- 2 => 'a-token',
- 3 => nil,
- 4 => nil,
- 5 => nil,
- 6 => nil,
- 7 => 'another-token',
- 8 => 'another-token'
- }
- )
- expect(ci_runners.count).to eq(8)
- expect(ci_runners.pluck(:token_encrypted).uniq).to match_array [
- nil, 'a-token', 'another-token'
- ]
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb
deleted file mode 100644
index 423b1815e75..00000000000
--- a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValues,
- :migration,
- schema: 20220922143143 do
- it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchedMigrationJob }
-
- describe '#perform' do
- let(:ci_runners) { table(:ci_runners, database: :ci) }
-
- let(:test_worker) do
- described_class.new(
- start_id: 1,
- end_id: 4,
- batch_table: :ci_runners,
- batch_column: :id,
- sub_batch_size: 2,
- pause_ms: 0,
- connection: Ci::ApplicationRecord.connection
- )
- end
-
- subject(:perform) { test_worker.perform }
-
- before do
- ci_runners.create!(id: 1, runner_type: 1, token: 'duplicate')
- ci_runners.create!(id: 2, runner_type: 1, token: 'a-token')
- ci_runners.create!(id: 3, runner_type: 1, token: 'duplicate-2')
- ci_runners.create!(id: 4, runner_type: 1, token: nil)
- ci_runners.create!(id: 5, runner_type: 1, token: 'duplicate-2')
- ci_runners.create!(id: 6, runner_type: 1, token: 'duplicate')
- ci_runners.create!(id: 7, runner_type: 1, token: 'another-token')
- ci_runners.create!(id: 8, runner_type: 1, token: 'another-token')
- end
-
- it 'nullifies duplicate tokens', :aggregate_failures do
- expect { perform }.to change { ci_runners.all.order(:id).pluck(:id, :token).to_h }
- .from(
- {
- 1 => 'duplicate',
- 2 => 'a-token',
- 3 => 'duplicate-2',
- 4 => nil,
- 5 => 'duplicate-2',
- 6 => 'duplicate',
- 7 => 'another-token',
- 8 => 'another-token'
- }
- )
- .to(
- {
- 1 => nil,
- 2 => 'a-token',
- 3 => nil,
- 4 => nil,
- 5 => nil,
- 6 => nil,
- 7 => 'another-token',
- 8 => 'another-token'
- }
- )
- expect(ci_runners.count).to eq(8)
- expect(ci_runners.pluck(:token).uniq).to match_array [
- nil, 'a-token', 'another-token'
- ]
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb b/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb
new file mode 100644
index 00000000000..2c5c47e39c9
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::SanitizeConfidentialTodos, :migration, schema: 20221110045406 do
+ let(:todos) { table(:todos) }
+ let(:notes) { table(:notes) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_features) { table(:project_features) }
+ let(:users) { table(:users) }
+ let(:issues) { table(:issues) }
+ let(:members) { table(:members) }
+ let(:project_authorizations) { table(:project_authorizations) }
+
+ let(:user) { users.create!(first_name: 'Test', last_name: 'User', email: 'test@user.com', projects_limit: 1) }
+ let(:project_namespace1) { namespaces.create!(path: 'pns1', name: 'pns1') }
+ let(:project_namespace2) { namespaces.create!(path: 'pns2', name: 'pns2') }
+
+ let(:project1) do
+ projects.create!(namespace_id: project_namespace1.id,
+ project_namespace_id: project_namespace1.id, visibility_level: 20)
+ end
+
+ let(:project2) do
+ projects.create!(namespace_id: project_namespace2.id,
+ project_namespace_id: project_namespace2.id)
+ end
+
+ let(:issue1) { issues.create!(project_id: project1.id, issue_type: 1, title: 'issue1', author_id: user.id) }
+ let(:issue2) { issues.create!(project_id: project2.id, issue_type: 1, title: 'issue2') }
+
+ let(:public_note) { notes.create!(note: 'text', project_id: project1.id) }
+
+ let(:confidential_note) do
+ notes.create!(note: 'text', project_id: project1.id, confidential: true,
+ noteable_id: issue1.id, noteable_type: 'Issue')
+ end
+
+ let(:other_confidential_note) do
+ notes.create!(note: 'text', project_id: project2.id, confidential: true,
+ noteable_id: issue2.id, noteable_type: 'Issue')
+ end
+
+ let(:common_params) { { user_id: user.id, author_id: user.id, action: 1, state: 'pending', target_type: 'Note' } }
+ let!(:ignored_todo1) { todos.create!(**common_params) }
+ let!(:ignored_todo2) { todos.create!(**common_params, target_id: public_note.id, note_id: public_note.id) }
+ let!(:valid_todo) { todos.create!(**common_params, target_id: confidential_note.id, note_id: confidential_note.id) }
+ let!(:invalid_todo) do
+ todos.create!(**common_params, target_id: other_confidential_note.id, note_id: other_confidential_note.id)
+ end
+
+ describe '#perform' do
+ before do
+ project_features.create!(project_id: project1.id, issues_access_level: 20, pages_access_level: 20)
+ members.create!(state: 0, source_id: project1.id, source_type: 'Project',
+ type: 'ProjectMember', user_id: user.id, access_level: 50, notification_level: 0,
+ member_namespace_id: project_namespace1.id)
+ project_authorizations.create!(project_id: project1.id, user_id: user.id, access_level: 50)
+ end
+
+ subject(:perform) do
+ described_class.new(
+ start_id: notes.minimum(:id),
+ end_id: notes.maximum(:id),
+ batch_table: :notes,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ ).perform
+ end
+
+ it 'deletes todos where user can not read its note and logs deletion', :aggregate_failures do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger).to receive(:info).with(
+ hash_including(
+ message: "#{described_class.name} deleting invalid todo",
+ attributes: hash_including(invalid_todo.attributes.slice(:id, :user_id, :target_id, :target_type))
+ )
+ ).once
+ end
+
+ expect { perform }.to change(todos, :count).by(-1)
+
+ expect(todos.all).to match_array([ignored_todo1, ignored_todo2, valid_todo])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb b/spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb
index 98939e15952..fad10aba882 100644
--- a/spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb
@@ -26,8 +26,8 @@ RSpec.describe Gitlab::BackgroundMigration::UpdateCiPipelineArtifactsUnknownLock
let(:locked) { 1 }
let(:unknown) { 2 }
- let(:unlocked_pipeline) { pipelines.create!(locked: unlocked) }
- let(:locked_pipeline) { pipelines.create!(locked: locked) }
+ let(:unlocked_pipeline) { pipelines.create!(locked: unlocked, partition_id: 100) }
+ let(:locked_pipeline) { pipelines.create!(locked: locked, partition_id: 100) }
# rubocop:disable Layout/LineLength
let!(:locked_artifact) { pipeline_artifacts.create!(project_id: project.id, pipeline_id: locked_pipeline.id, size: 1024, file_type: 0, file_format: 'gzip', file: 'a.gz', locked: unknown) }
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 186d4e1fb42..f83ce01c617 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
before do
stub_omniauth_provider('bitbucket')
- stub_feature_flags(stricter_mr_branch_name: false)
end
let(:statuses) do
diff --git a/spec/lib/gitlab/cache/metrics_spec.rb b/spec/lib/gitlab/cache/metrics_spec.rb
new file mode 100644
index 00000000000..d8103837708
--- /dev/null
+++ b/spec/lib/gitlab/cache/metrics_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::Metrics do
+ subject(:metrics) do
+ described_class.new(
+ caller_id: caller_id,
+ cache_identifier: cache_identifier,
+ feature_category: feature_category,
+ backing_resource: backing_resource
+ )
+ end
+
+ let(:caller_id) { 'caller-id' }
+ let(:cache_identifier) { 'ApplicationController#show' }
+ let(:feature_category) { :source_code_management }
+ let(:backing_resource) { :unknown }
+
+ let(:counter_mock) { instance_double(Prometheus::Client::Counter) }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(
+ :redis_hit_miss_operations_total,
+ 'Hit/miss Redis cache counter'
+ ).and_return(counter_mock)
+ end
+
+ describe '#initialize' do
+ context 'when backing resource is not supported' do
+ let(:backing_resource) { 'foo' }
+
+ it { expect { metrics }.to raise_error(RuntimeError) }
+
+ context 'when on production' do
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ end
+
+ it 'does not raise an exception' do
+ expect { metrics }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ describe '#increment_cache_hit' do
+ subject { metrics.increment_cache_hit }
+
+ it 'increments number of hits' do
+ expect(counter_mock)
+ .to receive(:increment)
+ .with(
+ {
+ caller_id: caller_id,
+ cache_identifier: cache_identifier,
+ feature_category: feature_category,
+ backing_resource: backing_resource,
+ cache_hit: true
+ }
+ ).once
+
+ subject
+ end
+ end
+
+ describe '#increment_cache_miss' do
+ subject { metrics.increment_cache_miss }
+
+ it 'increments number of misses' do
+ expect(counter_mock)
+ .to receive(:increment)
+ .with(
+ {
+ caller_id: caller_id,
+ cache_identifier: cache_identifier,
+ feature_category: feature_category,
+ backing_resource: backing_resource,
+ cache_hit: false
+ }
+ ).once
+
+ subject
+ end
+ end
+
+ describe '#observe_cache_generation' do
+ subject do
+ metrics.observe_cache_generation { action }
+ end
+
+ let(:action) { 'action' }
+ let(:histogram_mock) { instance_double(Prometheus::Client::Histogram) }
+
+ before do
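+ # Stubbed monotonic_time returns 100.0 on the first call and 500.0 on the second, so the generation duration observed below is 400.0 seconds.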
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100.0, 500.0)
+ end
+
+ it 'updates histogram metric' do
+ expect(Gitlab::Metrics).to receive(:histogram).with(
+ :redis_cache_generation_duration_seconds,
+ 'Duration of Redis cache generation',
+ {
+ caller_id: caller_id,
+ cache_identifier: cache_identifier,
+ feature_category: feature_category,
+ backing_resource: backing_resource
+ },
+ [0, 1, 5]
+ ).and_return(histogram_mock)
+
+ expect(histogram_mock).to receive(:observe).with({}, 400.0)
+
+ is_expected.to eq(action)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
index 3468094ffa5..abad2bfa905 100644
--- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -9,13 +9,26 @@ RSpec.describe Gitlab::Checks::LfsIntegrity do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:newrev) do
- operations = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- BareRepoOperations.new(repository.path)
- end
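+ # Commit a Git LFS pointer file so the integrity check sees a newly referenced LFS object.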
+ newrev = repository.commit_files(
+ project.creator,
+ branch_name: 'lfs_integrity_spec',
+ message: 'New LFS objects',
+ actions: [{
+ action: :create,
+ file_path: 'files/lfs/some.iso',
+ content: <<~LFS
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897
+ size 1575078
+ LFS
+ }]
+ )
# Create a commit not pointed at by any ref to emulate being in the
# pre-receive hook so that `--not --all` returns some objects
- operations.commit_tree('8856a329dd38ca86dfb9ce5aa58a16d88cc119bd', "New LFS objects")
+ repository.delete_branch('lfs_integrity_spec')
+
+ newrev
end
let(:newrevs) { [newrev] }
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb
index f9ebab149a5..647653f8e9e 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb
@@ -4,11 +4,37 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists do
describe '#satisfied_by?' do
- shared_examples 'an exists rule with a context' do
+ subject(:satisfied_by?) { described_class.new(globs).satisfied_by?(nil, context) }
+
+ shared_examples 'a rules:exists with a context' do
it_behaves_like 'a glob matching rule' do
let(:project) { create(:project, :custom_repo, files: files) }
end
+ context 'when the rules:exists has a variable' do
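+ # The glob below contains a CI variable; the examples expect it to be expanded from the context's variables_hash before matching.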
+ let_it_be(:project) { create(:project, :custom_repo, files: { 'helm/helm_file.txt' => '' }) }
+
+ let(:globs) { ['$HELM_DIR/**/*'] }
+
+ let(:variables_hash) do
+ { 'HELM_DIR' => 'helm' }
+ end
+
+ before do
+ allow(context).to receive(:variables_hash).and_return(variables_hash)
+ end
+
+ context 'when the context has the specified variables' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when variable expansion does not match' do
+ let(:variables_hash) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
context 'after pattern comparison limit is reached' do
let(:globs) { ['*definitely_not_a_matching_glob*'] }
let(:project) { create(:project, :repository) }
@@ -22,26 +48,24 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists do
end
end
- subject(:satisfied_by?) { described_class.new(globs).satisfied_by?(nil, context) }
-
- context 'when context is Build::Context::Build' do
- it_behaves_like 'an exists rule with a context' do
+ context 'when the rules are being evaluated at job level' do
+ it_behaves_like 'a rules:exists with a context' do
let(:pipeline) { build(:ci_pipeline, project: project, sha: project.repository.commit.sha) }
let(:context) { Gitlab::Ci::Build::Context::Build.new(pipeline, sha: project.repository.commit.sha) }
end
end
- context 'when context is Build::Context::Global' do
- it_behaves_like 'an exists rule with a context' do
+ context 'when the rules are being evaluated for an entire pipeline' do
+ it_behaves_like 'a rules:exists with a context' do
let(:pipeline) { build(:ci_pipeline, project: project, sha: project.repository.commit.sha) }
let(:context) { Gitlab::Ci::Build::Context::Global.new(pipeline, yaml_variables: {}) }
end
end
- context 'when context is Config::External::Context' do
+ context 'when rules are being evaluated with `include`' do
let(:context) { Gitlab::Ci::Config::External::Context.new(project: project, sha: sha) }
- it_behaves_like 'an exists rule with a context' do
+ it_behaves_like 'a rules:exists with a context' do
let(:sha) { project.repository.commit.sha }
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index c56f2d25074..8da46561b73 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
- subject { described_class.new(config, name: :my_bridge) }
+ subject(:entry) { described_class.new(config, name: :my_bridge) }
it_behaves_like 'with inheritable CI config' do
let(:inheritable_key) { 'default' }
@@ -380,4 +380,38 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
end
end
end
+
+ describe '#when' do
+ context 'when bridge is a manual action' do
+ let(:config) { { script: 'deploy', when: 'manual' } }
+
+ it { expect(entry.when).to eq('manual') }
+ end
+
+ context 'when bridge has no `when` attribute' do
+ let(:config) { { script: 'deploy' } }
+
+ it { expect(entry.when).to be_nil }
+ end
+
+ context 'when the `when` keyword is not a string' do
+ context 'when it is an array' do
+ let(:config) { { script: 'exit 0', when: ['always'] } }
+
+ it 'returns error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'bridge when should be a string'
+ end
+ end
+
+ context 'when it is a boolean' do
+ let(:config) { { script: 'exit 0', when: true } }
+
+ it 'returns error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'bridge when should be a string'
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 75ac2ca87ab..acf60a6cdda 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -317,6 +317,26 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
+ context 'when the `when` keyword is not a string' do
+ context 'when it is an array' do
+ let(:config) { { script: 'exit 0', when: ['always'] } }
+
+ it 'returns error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job when should be a string'
+ end
+ end
+
+ context 'when it is a boolean' do
+ let(:config) { { script: 'exit 0', when: true } }
+
+ it 'returns error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job when should be a string'
+ end
+ end
+ end
+
context 'when only: is used with rules:' do
let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
@@ -653,7 +673,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
with_them do
let(:config) { { script: 'ls', rules: rules, only: only }.compact }
- it "#{name}" do
+ it name.to_s do
expect(workflow).to receive(:has_rules?) { has_workflow_rules? }
entry.compose!(deps)
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index ad90dd59585..f1578a068b9 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -208,7 +208,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
it 'reports error about variable' do
expect(entry.errors)
- .to include 'variables:var2 config must be a string'
+ .to include 'variables:var2 config uses invalid data keys: description'
end
end
end
@@ -248,7 +248,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
with_them do
let(:config) { { script: 'ls', rules: rules, only: only }.compact }
- it "#{name}" do
+ it name.to_s do
expect(workflow).to receive(:has_rules?) { has_workflow_rules? }
entry.compose!(deps)
@@ -447,6 +447,29 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
)
end
end
+
+ context 'when variables have "expand" data' do
+ let(:config) do
+ {
+ script: 'echo',
+ variables: { 'VAR1' => 'val 1',
+ 'VAR2' => { value: 'val 2', expand: false },
+ 'VAR3' => { value: 'val 3', expand: true } }
+ }
+ end
+
+ it 'returns correct value' do
+ expect(entry.value).to eq(
+ name: :rspec,
+ stage: 'test',
+ only: { refs: %w[branches tags] },
+ job_variables: { 'VAR1' => { value: 'val 1' },
+ 'VAR2' => { value: 'val 2', raw: true },
+ 'VAR3' => { value: 'val 3', raw: false } },
+ root_variables_inheritance: true
+ )
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index a55e13e7c2d..085293d7368 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -316,6 +316,35 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
end
end
end
+
+ context 'when variables have "expand" data' do
+ let(:hash) do
+ {
+ variables: { 'VAR1' => 'val 1',
+ 'VAR2' => { value: 'val 2', expand: false },
+ 'VAR3' => { value: 'val 3', expand: true } },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ before do
+ root.compose!
+ end
+
+ it 'returns correct value' do
+ expect(root.variables_entry.value_with_data).to eq(
+ 'VAR1' => { value: 'val 1' },
+ 'VAR2' => { value: 'val 2', raw: true },
+ 'VAR3' => { value: 'val 3', raw: false }
+ )
+
+ expect(root.variables_value).to eq(
+ 'VAR1' => 'val 1',
+ 'VAR2' => 'val 2',
+ 'VAR3' => 'val 3'
+ )
+ end
+ end
end
context 'when configuration is not valid' do
diff --git a/spec/lib/gitlab/ci/config/entry/variable_spec.rb b/spec/lib/gitlab/ci/config/entry/variable_spec.rb
index 076a5b32e92..d7023072312 100644
--- a/spec/lib/gitlab/ci/config/entry/variable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variable_spec.rb
@@ -92,6 +92,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
+ it { is_expected.to eq(value: 'value') }
+ end
+
+ describe '#value_with_prefill_data' do
+ subject(:value_with_prefill_data) { entry.value_with_prefill_data }
+
it { is_expected.to eq(value: 'value', description: 'description') }
end
@@ -107,6 +113,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
+ it { is_expected.to eq(value: 'value') }
+ end
+
+ describe '#value_with_prefill_data' do
+ subject(:value_with_prefill_data) { entry.value_with_prefill_data }
+
it { is_expected.to eq(value: 'value', description: 'description') }
end
end
@@ -123,6 +135,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
+ it { is_expected.to eq(value: '123') }
+ end
+
+ describe '#value_with_prefill_data' do
+ subject(:value_with_prefill_data) { entry.value_with_prefill_data }
+
it { is_expected.to eq(value: '123', description: 'description') }
end
end
@@ -139,6 +157,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
+ it { is_expected.to eq(value: 'value') }
+ end
+
+ describe '#value_with_prefill_data' do
+ subject(:value_with_prefill_data) { entry.value_with_prefill_data }
+
it { is_expected.to eq(value: 'value', description: :description) }
end
end
@@ -192,6 +216,94 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
it { is_expected.to eq(value: 'value') }
end
+
+ describe '#value_with_prefill_data' do
+ subject(:value_with_prefill_data) { entry.value_with_prefill_data }
+
+ it { is_expected.to eq(value: 'value') }
+ end
+ end
+ end
+
+ context 'when config is a hash with expand' do
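+ # expand: false marks the variable as raw; value_with_data below is expected to report raw: true.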
+ let(:config) { { value: 'value', expand: false } }
+
+ context 'when metadata allowed_value_data is not provided' do
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ subject(:errors) { entry.errors }
+
+ it { is_expected.to include 'var1 config must be a string' }
+ end
+ end
+
+ context 'when metadata allowed_value_data is (value, expand)' do
+ let(:metadata) { { allowed_value_data: %i[value expand] } }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ it { is_expected.to eq('value') }
+ end
+
+ describe '#value_with_data' do
+ subject(:value_with_data) { entry.value_with_data }
+
+ it { is_expected.to eq(value: 'value', raw: true) }
+
+ context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
+ before do
+ stub_feature_flags(ci_raw_variables_in_yaml_config: false)
+ end
+
+ it { is_expected.to eq(value: 'value') }
+ end
+ end
+
+ context 'when config expand is true' do
+ let(:config) { { value: 'value', expand: true } }
+
+ describe '#value_with_data' do
+ subject(:value_with_data) { entry.value_with_data }
+
+ it { is_expected.to eq(value: 'value', raw: false) }
+ end
+ end
+
+ context 'when config expand is a string' do
+ let(:config) { { value: 'value', expand: "true" } }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ subject(:errors) { entry.errors }
+
+ it { is_expected.to include 'var1 config expand should be a boolean value' }
+ end
+ end
+ end
+
+ context 'when metadata allowed_value_data is (value, xyz)' do
+ let(:metadata) { { allowed_value_data: %i[value xyz] } }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ subject(:errors) { entry.errors }
+
+ it { is_expected.to include 'var1 config uses invalid data keys: expand' }
+ end
end
end
end
@@ -229,6 +341,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
+ it { is_expected.to eq(value: 'value') }
+ end
+
+ describe '#value_with_prefill_data' do
+ subject(:value_with_prefill_data) { entry.value_with_prefill_data }
+
it { is_expected.to eq(value: 'value', description: 'description', value_options: %w[value value2]) }
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
index 085f304094e..609e4422d5c 100644
--- a/spec/lib/gitlab/ci/config/entry/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
@@ -66,6 +66,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
)
end
end
+
+ describe '#value_with_prefill_data' do
+ it 'returns variable with prefill data' do
+ expect(entry.value_with_prefill_data).to eq(
+ 'VARIABLE_1' => { value: 'value 1' },
+ 'VARIABLE_2' => { value: 'value 2' }
+ )
+ end
+ end
end
context 'with numeric keys and values in the config' do
@@ -119,6 +128,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
describe '#value_with_data' do
it 'returns variable with data' do
expect(entry.value_with_data).to eq(
+ 'VARIABLE_1' => { value: 'value' }
+ )
+ end
+ end
+
+ describe '#value_with_prefill_data' do
+ it 'returns variable with prefill data' do
+ expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value', description: 'variable 1' }
)
end
@@ -147,6 +164,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
describe '#value_with_data' do
it 'returns variable with data' do
expect(entry.value_with_data).to eq(
+ 'VARIABLE_1' => { value: 'value1' }
+ )
+ end
+ end
+
+ describe '#value_with_prefill_data' do
+ it 'returns variable with prefill data' do
+ expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value1', value_options: %w[value1 value2], description: 'variable 1' }
)
end
@@ -174,6 +199,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
describe '#value_with_data' do
it 'returns variable with data' do
expect(entry.value_with_data).to eq(
+ 'VARIABLE_1' => { value: 'value 1' },
+ 'VARIABLE_2' => { value: 'value 2' }
+ )
+ end
+ end
+
+ describe '#value_with_prefill_data' do
+ it 'returns variable with prefill data' do
+ expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value 1', description: 'variable 1' },
'VARIABLE_2' => { value: 'value 2' }
)
diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
index 1306d61d99c..8475c3a8b19 100644
--- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
@@ -14,6 +14,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do
super
end
+
+ def validate_context!
+ # no-op
+ end
end
end
@@ -95,6 +99,24 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do
expect(file.error_message).to eq('Included file `some/file/xxxxxxxxxxxxxxxx.yml` does not have valid YAML syntax!')
end
end
+
+ context 'when the class has no validate_context!' do
+ let(:test_class) do
+ Class.new(described_class) do
+ def initialize(params, context)
+ @location = params
+
+ super
+ end
+ end
+ end
+
+ let(:location) { 'some/file/config.yaml' }
+
+ it 'raises an error' do
+ expect { valid? }.to raise_error(NotImplementedError)
+ end
+ end
end
describe '#to_hash' do
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index e12f5dcee0a..d905568f01e 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -113,7 +113,19 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
it_behaves_like 'logging config file fetch', 'config_file_fetch_template_content_duration_s', 1
end
- context 'when the key is a hash of file and remote' do
+ context 'when the key is not valid' do
+ let(:local_file) { 'secret-file.yml' }
+ let(:values) do
+ { include: { invalid: local_file },
+ image: 'image:1.0' }
+ end
+
+ it 'raises an ambiguous specification error' do
+ expect { subject }.to raise_error(described_class::AmbigiousSpecificationError, '`{"invalid":"secret-file.yml"}` does not have a valid subkey for include. Valid subkeys are: `local`, `project`, `remote`, `template`, `artifact`')
+ end
+ end
+
+ context 'when the key is a hash of local and remote' do
let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret-file', 'masked' => true }]) }
let(:local_file) { 'secret-file.yml' }
let(:remote_url) { 'https://gitlab.com/secret-file.yml' }
@@ -123,7 +135,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
end
it 'raises an ambiguous specification error' do
- expect { subject }.to raise_error(described_class::AmbigiousSpecificationError, 'Include `{"local":"xxxxxxxxxxx.yml","remote":"https://gitlab.com/xxxxxxxxxxx.yml"}` needs to match exactly one accessor!')
+ expect { subject }.to raise_error(described_class::AmbigiousSpecificationError, 'Each include must use only one of: `local`, `project`, `remote`, `template`, `artifact`')
end
end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 475503de7da..c4a6641ff6b 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -484,7 +484,7 @@ RSpec.describe Gitlab::Ci::Config do
it 'raises ConfigError' do
expect { config }.to raise_error(
described_class::ConfigError,
- 'Include `{"remote":"http://url","local":"/local/file.yml"}` needs to match exactly one accessor!'
+ /Each include must use only one of/
)
end
end
@@ -714,7 +714,7 @@ RSpec.describe Gitlab::Ci::Config do
it 'raises an error' do
expect { config }.to raise_error(
described_class::ConfigError,
- /needs to match exactly one accessor!/
+ /does not have a valid subkey for include/
)
end
end
diff --git a/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb b/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb
index 6a08e8f0b7f..1ef341ff863 100644
--- a/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Codequality::CodeClimate do
describe '#parse!' do
- subject(:parse) { described_class.new.parse!(code_climate, codequality_report) }
+ subject(:parse) { described_class.new.parse!(code_climate, codequality_report, metadata) }
let(:codequality_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:code_climate) do
@@ -35,6 +35,15 @@ RSpec.describe Gitlab::Ci::Parsers::Codequality::CodeClimate do
].to_json
end
+ let_it_be(:group) { create(:group, name: 'test-group') }
+ let_it_be(:project) { create(:project, path: 'test-project', group: group) }
+ let(:metadata) do
+ {
+ project: project,
+ commit_sha: 'f0cc5229e2aa5e9429f1b17a3b3b102f21d7fe31'
+ }
+ end
+
context "when data is code_climate style JSON" do
context "when there are no degradations" do
let(:code_climate) { [].to_json }
@@ -133,5 +142,56 @@ RSpec.describe Gitlab::Ci::Parsers::Codequality::CodeClimate do
expect(codequality_report.degradations_count).to eq(0)
end
end
+
+ context 'for web_url' do
+ let(:code_climate) do
+ [
+ {
+ "categories": [
+ "Complexity"
+ ],
+ "check_name": "argument_count",
+ "content": {
+ "body": ""
+ },
+ "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
+ "fingerprint": "15cdb5c53afd42bc22f8ca366a08d547",
+ "location": {
+ "path": "foo.rb",
+ "lines": {
+ "begin": 10,
+ "end": 10
+ }
+ },
+ "other_locations": [],
+ "remediation_points": 900000,
+ "severity": "major",
+ "type": "issue",
+ "engine_name": "structure"
+ }
+ ].to_json
+ end
+
+ context 'when metadata has project and commit_sha' do
+ it 'adds a non nil url' do
+ want = 'http://localhost/test-group/test-project/-/blob/f0cc5229e2aa5e9429f1b17a3b3b102f21d7fe31/foo.rb#L10'
+ expect { parse }.not_to raise_error
+
+ expect(codequality_report.degradations_count).to eq(1)
+ expect(codequality_report.all_degradations[0]['web_url']).to eq(want)
+ end
+ end
+
+ context 'when metadata does not have project and commit_sha' do
+ let(:metadata) { {} }
+
+ it 'adds a nil url' do
+ expect { parse }.not_to raise_error
+
+ expect(codequality_report.degradations_count).to eq(1)
+ expect(codequality_report.all_degradations[0]['web_url']).to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
index a9851d78f48..e4ae6b25362 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
describe '#parse!' do
let(:coverage_report) { Gitlab::Ci::Reports::CoverageReport.new }
let(:project_path) { 'foo/bar' }
+ let(:windows_path) { 'foo\bar' }
let(:paths) { ['app/user.rb'] }
let(:cobertura) do
@@ -269,6 +270,36 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
it_behaves_like 'ignoring sources, project_path, and worktree_paths'
end
+ context 'and has Windows-style paths' do
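+ # The <source> element uses backslash-separated Windows paths; filenames should still resolve relative to the project root.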
+ let(:sources_xml) do
+ <<~EOF_WIN
+ <sources>
+ <source>D:\\builds\\#{windows_path}\\app</source>
+ </sources>
+ EOF_WIN
+ end
+
+ context 'when there is a single <class>' do
+ context 'with a single line' do
+ let(:classes_xml) do
+ <<~EOF
+ <packages><package name="app"><classes>
+ <class filename="user.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with the filename relative to project root' do
+ expect { parse_report }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app/user.rb' => { 1 => 2 } })
+ end
+ end
+ end
+ end
+
context 'and has multiple sources with a pattern for Go projects' do
let(:project_path) { 'local/go' } # Make sure we're not making false positives
let(:sources_xml) do
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
index 38b229e0dd8..f09b85aa2c7 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
@@ -3,7 +3,7 @@
require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Sbom::CyclonedxProperties do
- subject(:parse_source) { described_class.parse_source(properties) }
+ subject(:parse_source_from_properties) { described_class.parse_source(properties) }
context 'when properties are nil' do
let(:properties) { nil }
@@ -50,9 +50,9 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::CyclonedxProperties do
end
it 'does not call dependency_scanning parser' do
- expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).not_to receive(:parse_source)
+ expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).not_to receive(:source)
- parse_source
+ parse_source_from_properties
end
end
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::CyclonedxProperties do
it 'passes only supported properties to the dependency scanning parser' do
expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).to receive(:source).with(expected_input)
- parse_source
+ parse_source_from_properties
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
index f3636106b98..0b094880f69 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
@@ -100,16 +100,53 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx do
]
end
+ before do
+ allow(report).to receive(:add_component)
+ end
+
it 'adds each component, ignoring unused attributes' do
expect(report).to receive(:add_component)
- .with(an_object_having_attributes(name: "activesupport", version: "5.1.4", component_type: "library"))
+ .with(
+ an_object_having_attributes(
+ name: "activesupport",
+ version: "5.1.4",
+ component_type: "library",
+ purl: an_object_having_attributes(type: "gem")
+ )
+ )
expect(report).to receive(:add_component)
- .with(an_object_having_attributes(name: "byebug", version: "10.0.0", component_type: "library"))
+ .with(
+ an_object_having_attributes(
+ name: "byebug",
+ version: "10.0.0",
+ component_type: "library",
+ purl: an_object_having_attributes(type: "gem")
+ )
+ )
expect(report).to receive(:add_component)
.with(an_object_having_attributes(name: "minimal-component", version: nil, component_type: "library"))
parse!
end
+
+ context 'when a component has an invalid purl' do
+ before do
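+ # "pkg:nil" lacks the name segment required by the package URL spec, so the component below should be reported as invalid.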
+ components.push(
+ {
+ "name" => "invalid-component",
+ "version" => "v0.0.1",
+ "purl" => "pkg:nil",
+ "type" => "library"
+ }
+ )
+ end
+
+ it 'adds an error to the report' do
+ expect(report).to receive(:add_error).with("/components/#{components.size - 1}/purl is invalid")
+
+ parse!
+ end
+ end
end
context 'when report has metadata properties' do
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 7dbad354e4c..03cab021c17 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -400,26 +400,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
end
describe 'parsing tracking' do
- let(:tracking_data) do
- {
- 'type' => 'source',
- 'items' => [
- 'signatures' => [
- { 'algorithm' => 'hash', 'value' => 'hash_value' },
- { 'algorithm' => 'location', 'value' => 'location_value' },
- { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
- ]
- ]
- }
- end
-
- context 'with valid tracking information' do
- it 'creates signatures for each algorithm' do
- finding = report.findings.first
- expect(finding.signatures.size).to eq(3)
- expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location', 'scope_offset'])
- end
- end
+ let(:finding) { report.findings.first }
context 'with invalid tracking information' do
let(:tracking_data) do
@@ -436,15 +417,26 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
end
it 'ignores invalid algorithm types' do
- finding = report.findings.first
expect(finding.signatures.size).to eq(2)
expect(finding.signatures.map(&:algorithm_type).to_set).to eq(Set['hash', 'location'])
end
end
context 'with valid tracking information' do
+ let(:tracking_data) do
+ {
+ 'type' => 'source',
+ 'items' => [
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
+ ]
+ ]
+ }
+ end
+
it 'creates signatures for each signature algorithm' do
- finding = report.findings.first
expect(finding.signatures.size).to eq(3)
expect(finding.signatures.map(&:algorithm_type)).to eq(%w[hash location scope_offset])
@@ -456,7 +448,6 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
end
it 'sets the uuid according to the highest priority signature' do
- finding = report.findings.first
highest_signature = finding.signatures.max_by(&:priority)
identifiers = if signatures_enabled
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index 6e8b6e40928..9126c6dab21 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -409,4 +409,21 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
end
end
end
+
+ describe '#observe_pipeline_size' do
+ let(:command) { described_class.new(project: project) }
+
+ let(:pipeline) { instance_double(Ci::Pipeline, total_size: 5, project: project, source: "schedule") }
+
+ it 'logs the pipeline total size to histogram' do
+ histogram = instance_double(Prometheus::Client::Histogram)
+
+ expect(::Gitlab::Ci::Pipeline::Metrics).to receive(:pipeline_size_histogram)
+ .and_return(histogram)
+ expect(histogram).to receive(:observe)
+ .with({ source: pipeline.source, plan: project.actual_plan_name }, pipeline.total_size)
+
+ command.observe_pipeline_size(pipeline)
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb
index bc453f1502b..c5a5e905d17 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb
@@ -69,7 +69,9 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::ActiveJobs do
class: described_class.name,
message: described_class::MESSAGE,
project_id: project.id,
- plan: default_plan.name
+ plan: default_plan.name,
+ project_path: project.path,
+ jobs_in_alive_pipelines_count: step.send(:count_jobs_in_alive_pipelines)
)
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
new file mode 100644
index 00000000000..ce1ee2fcda0
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, ref: 'master', user: user)
+ end
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(
+ project: project,
+ current_user: user,
+ origin_ref: 'master')
+ end
+
+ let(:dependencies) do
+ [
+ Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::SeedBlock.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Seed.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Populate.new(pipeline, command)
+ ]
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ let(:config) do
+ { rspec: { script: 'rspec' } }
+ end
+
+ def run_chain
+ dependencies.map(&:perform!)
+ step.perform!
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ context 'with pipeline name' do
+ let(:config) do
+ { workflow: { name: ' Pipeline name ' }, rspec: { script: 'rspec' } }
+ end
+
+ it 'does not break the chain' do
+ run_chain
+
+ expect(step.break?).to be false
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(pipeline_name: false)
+ end
+
+ it 'does not build pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata).to be_nil
+ end
+ end
+
+ context 'with feature flag enabled' do
+ before do
+ stub_feature_flags(pipeline_name: true)
+ end
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.name).to eq('Pipeline name')
+ expect(pipeline.pipeline_metadata.project).to eq(pipeline.project)
+ expect(pipeline.pipeline_metadata).not_to be_persisted
+ end
+
+ context 'with empty name' do
+ let(:config) do
+ { workflow: { name: ' ' }, rspec: { script: 'rspec' } }
+ end
+
+ it 'strips whitespace from name' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata).to be_nil
+ end
+ end
+
+ context 'with variables' do
+ let(:config) do
+ {
+ variables: { ROOT_VAR: 'value $WORKFLOW_VAR1' },
+ workflow: {
+ name: 'Pipeline $ROOT_VAR $WORKFLOW_VAR2 $UNKNOWN_VAR',
+ rules: [{ variables: { WORKFLOW_VAR1: 'value1', WORKFLOW_VAR2: 'value2' } }]
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ it 'substitutes variables' do
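+ # $ROOT_VAR expands to 'value value1' via $WORKFLOW_VAR1; $UNKNOWN_VAR is undefined and becomes an empty string, leaving the trailing space.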
+ run_chain
+
+ expect(pipeline.pipeline_metadata.name).to eq('Pipeline value value1 value2 ')
+ end
+ end
+
+ context 'with invalid name' do
+ let(:config) do
+ {
+ variables: { ROOT_VAR: 'a' * 256 },
+ workflow: {
+ name: 'Pipeline $ROOT_VAR'
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ it 'returns error and breaks chain' do
+ ret = run_chain
+
+ expect(ret)
+ .to match_array(["Failed to build pipeline metadata! Name is too long (maximum is 255 characters)"])
+ expect(pipeline.pipeline_metadata.errors.full_messages)
+ .to match_array(['Name is too long (maximum is 255 characters)'])
+ expect(step.break?).to be true
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 51d1661b586..62de4d2e96d 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -236,47 +236,4 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
end
end
end
-
- context 'with pipeline name' do
- let(:config) do
- { workflow: { name: ' Pipeline name ' }, rspec: { script: 'rspec' } }
- end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(pipeline_name: false)
- end
-
- it 'does not build pipeline_metadata' do
- run_chain
-
- expect(pipeline.pipeline_metadata).to be_nil
- end
- end
-
- context 'with feature flag enabled' do
- before do
- stub_feature_flags(pipeline_name: true)
- end
-
- it 'builds pipeline_metadata' do
- run_chain
-
- expect(pipeline.pipeline_metadata.title).to eq('Pipeline name')
- expect(pipeline.pipeline_metadata.project).to eq(pipeline.project)
- end
-
- context 'with empty name' do
- let(:config) do
- { workflow: { name: ' ' }, rspec: { script: 'rspec' } }
- end
-
- it 'strips whitespace from name' do
- run_chain
-
- expect(pipeline.pipeline_metadata).to be_nil
- end
- end
- end
- end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
index c69aa661b05..31086f6ae4a 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -80,7 +80,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
subject.build!
expect(histogram).to have_received(:observe)
- .with({ source: 'push' }, 0)
+ .with({ source: 'push', plan: project.actual_plan_name }, 0)
end
describe 'active jobs by pipeline plan histogram' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
deleted file mode 100644
index 6569ce937ac..00000000000
--- a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
+++ /dev/null
@@ -1,119 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Ci::Pipeline::Seed::Deployment do
- let_it_be(:project, refind: true) { create(:project, :repository) }
-
- let(:pipeline) do
- create(:ci_pipeline, project: project, sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
- end
-
- let(:job) { build(:ci_build, project: project, pipeline: pipeline) }
- let(:environment) { Gitlab::Ci::Pipeline::Seed::Environment.new(job).to_resource }
- let(:seed) { described_class.new(job, environment) }
- let(:attributes) { {} }
-
- before do
- job.assign_attributes(**attributes)
- end
-
- describe '#to_resource' do
- subject { seed.to_resource }
-
- context 'when job has environment attribute' do
- let(:attributes) do
- {
- environment: 'production',
- options: { environment: { name: 'production', **kubernetes_options } }
- }
- end
-
- let(:kubernetes_options) { {} }
-
- it 'returns a deployment object with environment' do
- expect(subject).to be_a(Deployment)
- expect(subject.iid).to be_present
- expect(subject.environment.name).to eq('production')
- expect(subject.cluster).to be_nil
- expect(subject.deployment_cluster).to be_nil
- end
-
- context 'when environment has deployment platform' do
- let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project], managed: managed_cluster) }
- let(:managed_cluster) { true }
-
- it 'sets the cluster and deployment_cluster' do
- expect(subject.cluster).to eq(cluster) # until we stop double writing in 12.9: https://gitlab.com/gitlab-org/gitlab/issues/202628
- expect(subject.deployment_cluster.cluster).to eq(cluster)
- end
-
- context 'when a custom namespace is given' do
- let(:kubernetes_options) { { kubernetes: { namespace: 'the-custom-namespace' } } }
-
- context 'when cluster is managed' do
- it 'does not set the custom namespace' do
- expect(subject.deployment_cluster.kubernetes_namespace).not_to eq('the-custom-namespace')
- end
- end
-
- context 'when cluster is not managed' do
- let(:managed_cluster) { false }
-
- it 'sets the custom namespace' do
- expect(subject.deployment_cluster.kubernetes_namespace).to eq('the-custom-namespace')
- end
- end
- end
- end
-
- context 'when environment has an invalid URL' do
- let(:attributes) do
- {
- environment: '!!!',
- options: { environment: { name: '!!!' } }
- }
- end
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
-
- context 'when job has already deployment' do
- let(:job) { build(:ci_build, :with_deployment, project: project, environment: 'production') }
-
- it 'returns the persisted deployment' do
- is_expected.to eq(job.deployment)
- end
- end
- end
-
- context 'when job does not start environment' do
- where(:action) do
- %w(stop prepare verify access)
- end
-
- with_them do
- let(:attributes) do
- {
- environment: 'production',
- options: { environment: { name: 'production', action: action } }
- }
- end
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
- end
-
- context 'when job does not have environment attribute' do
- let(:attributes) { { name: 'test' } }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
deleted file mode 100644
index 2b9d8127886..00000000000
--- a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
+++ /dev/null
@@ -1,224 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
- let_it_be(:project) { create(:project) }
-
- let!(:pipeline) { create(:ci_pipeline, project: project) }
-
- let(:job) { build(:ci_build, project: project, pipeline: pipeline) }
- let(:seed) { described_class.new(job) }
- let(:attributes) { {} }
-
- before do
- job.assign_attributes(**attributes)
- end
-
- describe '#to_resource' do
- subject { seed.to_resource }
-
- shared_examples_for 'returning a correct environment' do
- let(:expected_auto_stop_in_seconds) do
- if expected_auto_stop_in
- ChronicDuration.parse(expected_auto_stop_in).seconds
- end
- end
-
- it 'returns a persisted environment object' do
- freeze_time do
- expect { subject }.to change { Environment.count }.by(1)
-
- expect(subject).to be_a(Environment)
- expect(subject).to be_persisted
- expect(subject.project).to eq(project)
- expect(subject.name).to eq(expected_environment_name)
- expect(subject.auto_stop_in).to eq(expected_auto_stop_in_seconds)
- end
- end
-
- context 'when environment has already existed' do
- let!(:environment) do
- create(:environment,
- project: project,
- name: expected_environment_name
- ).tap do |env|
- env.auto_stop_in = expected_auto_stop_in
- end
- end
-
- it 'returns the existing environment object' do
- expect { subject }.not_to change { Environment.count }
- expect { subject }.not_to change { environment.auto_stop_at }
-
- expect(subject).to be_persisted
- expect(subject).to eq(environment)
- end
- end
- end
-
- context 'when job has environment name attribute' do
- let(:environment_name) { 'production' }
- let(:expected_environment_name) { 'production' }
- let(:expected_auto_stop_in) { nil }
-
- let(:attributes) do
- {
- environment: environment_name,
- options: { environment: { name: environment_name } }
- }
- end
-
- it_behaves_like 'returning a correct environment'
-
- context 'and job environment also has an auto_stop_in attribute' do
- let(:environment_auto_stop_in) { '5 minutes' }
- let(:expected_auto_stop_in) { '5 minutes' }
-
- let(:attributes) do
- {
- environment: environment_name,
- options: {
- environment: {
- name: environment_name,
- auto_stop_in: environment_auto_stop_in
- }
- }
- }
- end
-
- it_behaves_like 'returning a correct environment'
- end
-
- context 'and job environment has an auto_stop_in variable attribute' do
- let(:environment_auto_stop_in) { '10 minutes' }
- let(:expected_auto_stop_in) { '10 minutes' }
-
- let(:attributes) do
- {
- environment: environment_name,
- options: {
- environment: {
- name: environment_name,
- auto_stop_in: '$TTL'
- }
- },
- yaml_variables: [
- { key: "TTL", value: environment_auto_stop_in, public: true }
- ]
- }
- end
-
- it_behaves_like 'returning a correct environment'
- end
- end
-
- context 'when job has deployment tier attribute' do
- let(:attributes) do
- {
- environment: 'customer-portal',
- options: {
- environment: {
- name: 'customer-portal',
- deployment_tier: deployment_tier
- }
- }
- }
- end
-
- let(:deployment_tier) { 'production' }
-
- context 'when environment has not been created yet' do
- it 'sets the specified deployment tier' do
- is_expected.to be_production
- end
-
- context 'when deployment tier is staging' do
- let(:deployment_tier) { 'staging' }
-
- it 'sets the specified deployment tier' do
- is_expected.to be_staging
- end
- end
-
- context 'when deployment tier is unknown' do
- let(:deployment_tier) { 'unknown' }
-
- it 'raises an error' do
- expect { subject }.to raise_error(ArgumentError, "'unknown' is not a valid tier")
- end
- end
- end
-
- context 'when environment has already been created' do
- before do
- create(:environment, project: project, name: 'customer-portal', tier: :staging)
- end
-
- it 'does not overwrite the specified deployment tier' do
- # This is to be updated when a deployment succeeded i.e. Deployments::UpdateEnvironmentService.
- is_expected.to be_staging
- end
- end
- end
-
- context 'when job starts a review app' do
- let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
- let(:expected_environment_name) { "review/#{job.ref}" }
- let(:expected_auto_stop_in) { nil }
-
- let(:attributes) do
- {
- environment: environment_name,
- options: { environment: { name: environment_name } }
- }
- end
-
- it_behaves_like 'returning a correct environment'
- end
-
- context 'when job stops a review app' do
- let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
- let(:expected_environment_name) { "review/#{job.ref}" }
- let(:expected_auto_stop_in) { nil }
-
- let(:attributes) do
- {
- environment: environment_name,
- options: { environment: { name: environment_name, action: 'stop' } }
- }
- end
-
- it_behaves_like 'returning a correct environment'
- end
-
- context 'when merge_request is provided' do
- let(:environment_name) { 'development' }
- let(:attributes) { { environment: environment_name, options: { environment: { name: environment_name } } } }
- let(:merge_request) { create(:merge_request, source_project: project) }
- let(:seed) { described_class.new(job, merge_request: merge_request) }
-
- context 'and environment does not exist' do
- let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
-
- it 'creates an environment associated with the merge request' do
- expect { subject }.to change { Environment.count }.by(1)
-
- expect(subject.merge_request).to eq(merge_request)
- end
- end
-
- context 'and environment already exists' do
- before do
- create(:environment, project: project, name: environment_name)
- end
-
- it 'does not change the merge request associated with the environment' do
- expect { subject }.not_to change { Environment.count }
-
- expect(subject.merge_request).to be_nil
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
index a76b4874eca..55980ae72a0 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
@@ -6,7 +6,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let(:seed_context) { Gitlab::Ci::Pipeline::Seed::Context.new(pipeline, root_variables: []) }
+ let(:root_variables) { [] }
+
+ let(:seed_context) { Gitlab::Ci::Pipeline::Seed::Context.new(pipeline, root_variables: root_variables) }
let(:stages_attributes) do
[
@@ -75,4 +77,12 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
expect(seed.deployments_count).to eq(2)
end
end
+
+ describe '#root_variables' do
+ let(:root_variables) { %w[var1 value1] }
+
+ it 'returns root_variables' do
+ expect(seed.root_variables).to eq(root_variables)
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
index 06ea3433ef0..cdaf9354104 100644
--- a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
+++ b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
@@ -1,23 +1,67 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Reports::Sbom::Component do
- let(:attributes) do
- {
- type: 'library',
- name: 'component-name',
- version: 'v0.0.1'
- }
- end
+ let(:component_type) { 'library' }
+ let(:name) { 'component-name' }
+ let(:purl_type) { 'npm' }
+ let(:purl) { Sbom::PackageUrl.new(type: purl_type, name: name, version: version).to_s }
+ let(:version) { 'v0.0.1' }
- subject { described_class.new(**attributes) }
+ subject(:component) do
+ described_class.new(
+ type: component_type,
+ name: name,
+ purl: purl,
+ version: version
+ )
+ end
it 'has correct attributes' do
- expect(subject).to have_attributes(
- component_type: attributes[:type],
- name: attributes[:name],
- version: attributes[:version]
+ expect(component).to have_attributes(
+ component_type: component_type,
+ name: name,
+ purl: an_object_having_attributes(type: purl_type),
+ version: version
)
end
+
+ describe '#ingestible?' do
+ subject { component.ingestible? }
+
+ context 'when component_type is invalid' do
+ let(:component_type) { 'invalid' }
+
+ it { is_expected.to be(false) }
+ end
+
+ context 'when purl_type is invalid' do
+ let(:purl_type) { 'invalid' }
+
+ it { is_expected.to be(false) }
+ end
+
+ context 'when component_type is valid' do
+ where(:component_type) { ::Enums::Sbom.component_types.keys.map(&:to_s) }
+
+ with_them do
+ it { is_expected.to be(true) }
+ end
+ end
+
+ context 'when purl_type is valid' do
+ where(:purl_type) { ::Enums::Sbom.purl_types.keys.map(&:to_s) }
+
+ with_them do
+ it { is_expected.to be(true) }
+ end
+ end
+
+ context 'when there is no purl' do
+ let(:purl) { nil }
+
+ it { is_expected.to be(true) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/reports/sbom/report_spec.rb b/spec/lib/gitlab/ci/reports/sbom/report_spec.rb
index 6ffa93e5fc8..f9a83378f46 100644
--- a/spec/lib/gitlab/ci/reports/sbom/report_spec.rb
+++ b/spec/lib/gitlab/ci/reports/sbom/report_spec.rb
@@ -5,6 +5,21 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Reports::Sbom::Report do
subject(:report) { described_class.new }
+ describe '#valid?' do
+ context 'when there are no errors' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'when report contains errors' do
+ before do
+ report.add_error('error1')
+ report.add_error('error2')
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+
describe '#add_error' do
it 'appends errors to a list' do
report.add_error('error1')
diff --git a/spec/lib/gitlab/ci/reports/security/flag_spec.rb b/spec/lib/gitlab/ci/reports/security/flag_spec.rb
index 6ee074f7aeb..0ef8f6c75a0 100644
--- a/spec/lib/gitlab/ci/reports/security/flag_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/flag_spec.rb
@@ -29,5 +29,11 @@ RSpec.describe Gitlab::Ci::Reports::Security::Flag do
)
end
end
+
+ describe '#false_positive?' do
+ subject { security_flag.false_positive? }
+
+ it { is_expected.to be_truthy }
+ end
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/reports_spec.rb b/spec/lib/gitlab/ci/reports/security/reports_spec.rb
index e240edc4a12..33f3317c655 100644
--- a/spec/lib/gitlab/ci/reports/security/reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/reports_spec.rb
@@ -125,6 +125,32 @@ RSpec.describe Gitlab::Ci::Reports::Security::Reports do
it { is_expected.to be(false) }
end
+
+ context 'when target_reports is not nil and reports is empty' do
+ let(:without_reports) { described_class.new(pipeline) }
+
+ subject { without_reports.violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels, vulnerability_states) }
+
+ before do
+ target_reports.get_report('sast', artifact).add_finding(high_severity_dast)
+ end
+
+ context 'when require_approval_on_scan_removal feature is enabled' do
+ before do
+ stub_feature_flags(require_approval_on_scan_removal: true)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when require_approval_on_scan_removal feature is disabled' do
+ before do
+ stub_feature_flags(require_approval_on_scan_removal: false)
+ end
+
+ it { is_expected.to be(false) }
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index 4a1f77bed65..05f6a8a8cb6 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -209,7 +209,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
describe "##{status_type}" do
- subject { test_suite.public_send("#{status_type}") }
+ subject { test_suite.public_send(status_type.to_s) }
context "when #{status_type} test case exists" do
before do
diff --git a/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
index 8204b104832..43deb465025 100644
--- a/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe '5-Minute-Production-App.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
index 65fd2b016ac..f2bff5ff3e0 100644
--- a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
let(:platform_target) { 'ECS' }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
index 21052f03cb8..07cfa939623 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
@@ -3,8 +3,20 @@
require 'spec_helper'
RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
+ include Ci::TemplateHelpers
+
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Build') }
+ describe 'AUTO_BUILD_IMAGE_VERSION' do
+ it 'corresponds to a published image in the registry' do
+ registry = "https://#{template_registry_host}"
+ repository = "gitlab-org/cluster-integration/auto-build-image"
+ reference = YAML.safe_load(template.content).dig('variables', 'AUTO_BUILD_IMAGE_VERSION')
+
+ expect(public_image_exist?(registry, repository, reference)).to be true
+ end
+ end
+
describe 'the created pipeline' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -12,7 +24,7 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
index d88d9782021..16c5d7a4b6d 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -62,7 +62,8 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
context 'on master' do
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
@@ -70,7 +71,8 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
let(:pipeline_ref) { 'feature' }
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
@@ -78,7 +80,8 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
let(:pipeline_ref) { 'v1.0.0' }
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
end
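Throughout these template specs the pattern moves from `service.execute!(:push)`, which raised `Ci::CreatePipelineService::CreateError`, to `service.execute(:push)`, which returns a response whose payload is a pipeline carrying its own errors. A minimal sketch of the new assertion style, assuming that response/payload shape (names mirror the diff):

# Sketch of the non-raising pattern used in these specs.
let(:service)     { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
let(:pipeline)    { service.execute(:push).payload } # no longer raises when no jobs are created
let(:build_names) { pipeline.builds.pluck(:name) }

it 'reports "no jobs" through pipeline errors instead of an exception' do
  expect(build_names).to be_empty
  expect(pipeline.errors.full_messages).to match_array(['No stages / jobs for this pipeline.'])
end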
diff --git a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
index b657f73fa77..acb296082b8 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
+ include Ci::TemplateHelpers
+
subject(:template) do
<<~YAML
stages:
@@ -26,6 +28,17 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
YAML
end
+ describe 'AUTO_DEPLOY_IMAGE_VERSION' do
+ it 'corresponds to a published image in the registry' do
+ template = Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Deploy')
+ registry = "https://#{template_registry_host}"
+ repository = "gitlab-org/cluster-integration/auto-deploy-image"
+ reference = YAML.safe_load(template.content, aliases: true).dig('variables', 'AUTO_DEPLOY_IMAGE_VERSION')
+
+ expect(public_image_exist?(registry, repository, reference)).to be true
+ end
+ end
+
describe 'the created pipeline' do
let_it_be(:project, refind: true) { create(:project, :repository) }
@@ -33,7 +46,7 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb
index 85516d0bbb0..8a5aea7c0f0 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb
@@ -9,10 +9,10 @@ RSpec.describe 'Jobs/SAST-IaC.gitlab-ci.yml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
- let(:default_branch) { 'main' }
+ let(:default_branch) { "master" }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -49,7 +49,8 @@ RSpec.describe 'Jobs/SAST-IaC.gitlab-ci.yml' do
context 'on default branch' do
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
@@ -57,7 +58,8 @@ RSpec.describe 'Jobs/SAST-IaC.gitlab-ci.yml' do
let(:pipeline_ref) { 'feature' }
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
index 5ff179b6fee..d540b035f81 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
@@ -9,10 +9,10 @@ RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
- let(:default_branch) { 'main' }
+ let(:default_branch) { "master" }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -50,7 +50,8 @@ RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do
context 'on default branch' do
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
@@ -58,7 +59,8 @@ RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do
let(:pipeline_ref) { 'feature' }
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
index a92a8397e96..7cf0cf3ed33 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -62,7 +62,8 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
context 'on master' do
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
@@ -70,7 +71,8 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
let(:pipeline_ref) { 'feature' }
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
@@ -78,7 +80,8 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
let(:pipeline_ref) { 'v1.0.0' }
it 'has no jobs' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError)
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/MATLAB_spec.rb b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb
index 432040c4a14..3889d1fc8c9 100644
--- a/spec/lib/gitlab/ci/templates/MATLAB_spec.rb
+++ b/spec/lib/gitlab/ci/templates/MATLAB_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'MATLAB.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_branch) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
index eca79f37779..42df924f8fd 100644
--- a/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Terraform/Base.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb
index 0ab81f97f20..332708ffa13 100644
--- a/spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Terraform/base_latest_gitlab_ci_yaml_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Terraform/Base.latest.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
index d6c7cd32f79..0f0192ad38f 100644
--- a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
let(:default_branch) { 'master' }
let(:pipeline_ref) { default_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index 1a909f52ec3..b2ca906e172 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
index de94eec09fe..afb7773ad7a 100644
--- a/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/flutter_gitlab_ci_yaml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Flutter.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb
index ebf52e6d65a..62e4188f59b 100644
--- a/spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Kaniko.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'Dockerfile' => 'FROM alpine:latest' }) }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/katalon_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/katalon_gitlab_ci_yaml_spec.rb
index 5a62324da74..a44833b0c01 100644
--- a/spec/lib/gitlab/ci/templates/katalon_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/katalon_gitlab_ci_yaml_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Katalon.gitlab-ci.yml' do
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: 'master' ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
diff --git a/spec/lib/gitlab/ci/templates/npm_spec.rb b/spec/lib/gitlab/ci/templates/npm_spec.rb
index d86a3a67823..55fd4675f11 100644
--- a/spec/lib/gitlab/ci/templates/npm_spec.rb
+++ b/spec/lib/gitlab/ci/templates/npm_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'npm.gitlab-ci.yml' do
let(:pipeline_tag) { 'v1.2.1' }
let(:pipeline_ref) { pipeline_branch }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
def create_branch(name:)
@@ -42,7 +42,8 @@ RSpec.describe 'npm.gitlab-ci.yml' do
shared_examples 'no pipeline created' do
it 'does not create a pipeline because the only job (publish) is not created' do
- expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError, 'No stages / jobs for this pipeline.')
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
diff --git a/spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb
index 2fc4b509aab..aa7d0249066 100644
--- a/spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/terraform_gitlab_ci_yaml_spec.rb
@@ -12,10 +12,10 @@ RSpec.describe 'Terraform.gitlab-ci.yml' do
describe 'the created pipeline' do
let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
- let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let_it_be(:project) { create(:project, :repository, create_branch: 'patch-1') }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -27,23 +27,30 @@ RSpec.describe 'Terraform.gitlab-ci.yml' do
end
context 'on master branch' do
- it 'creates init, validate and build jobs', :aggregate_failures do
+ it 'creates init, validate and build terraform jobs as well as the kics-iac-sast job', :aggregate_failures do
expect(pipeline.errors).to be_empty
- expect(build_names).to include('validate', 'build', 'deploy')
+ expect(build_names).to include('kics-iac-sast', 'validate', 'build', 'deploy')
end
end
context 'outside the master branch' do
let(:pipeline_branch) { 'patch-1' }
- before do
- project.repository.create_branch(pipeline_branch, default_branch)
- end
-
it 'does not create a deploy and a test job', :aggregate_failures do
expect(pipeline.errors).to be_empty
expect(build_names).not_to include('deploy')
end
end
+
+ context 'on merge request' do
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project) }
+ let(:pipeline) { service.execute(merge_request).payload }
+
+ it 'creates a pipeline with no jobs' do
+ expect(pipeline).to be_merge_request_event
+ expect(pipeline.builds.count).to be_zero
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
index 42e56c4ab3c..6ae51f9783b 100644
--- a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
@@ -12,10 +12,10 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
describe 'the created pipeline' do
let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
- let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
- let(:user) { project.first_owner }
+ let_it_be(:project) { create(:project, :repository, create_branch: 'patch-1') }
+ let_it_be(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -36,14 +36,38 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
context 'outside the master branch' do
let(:pipeline_branch) { 'patch-1' }
- before do
- project.repository.create_branch(pipeline_branch, default_branch)
- end
-
it 'does not create a deploy and a test job', :aggregate_failures do
expect(pipeline.errors).to be_empty
expect(build_names).not_to include('deploy')
end
end
+
+ context 'on merge request' do
+ let(:pipeline_branch) { 'patch-1' }
+ let(:mr_service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project, source_branch: pipeline_branch ) }
+ let(:mr_pipeline) { mr_service.execute(merge_request).payload }
+ let(:mr_build_names) { mr_pipeline.builds.pluck(:name) }
+ let(:branch_service) { Ci::CreatePipelineService.new(project, user, ref: merge_request.source_branch ) }
+ let(:branch_pipeline) { branch_service.execute(:push).payload }
+ let(:branch_build_names) { branch_pipeline.builds.pluck(:name) }
+
+ # This is needed so that the terraform artifacts and sast_iac artifacts
+ # are both available in the MR
+ it 'creates a pipeline with the terraform and sast_iac jobs' do
+ expect(mr_pipeline).to be_merge_request_event
+ expect(mr_pipeline.errors.full_messages).to be_empty
+ expect(mr_build_names).to include('kics-iac-sast', 'validate', 'build')
+ end
+
+ it 'does not create a deploy', :aggregate_failures do
+ expect(mr_build_names).not_to include('deploy')
+ end
+
+ it 'does not create a branch pipeline', :aggregate_failures do
+ expect(branch_build_names).to be_empty
+ expect(branch_pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb
index 4708108f404..157fd39f1cc 100644
--- a/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'ThemeKit.gitlab-ci.yml' do
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.first_owner }
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) }
- let(:pipeline) { service.execute!(:push).payload }
+ let(:pipeline) { service.execute(:push).payload }
let(:build_names) { pipeline.builds.pluck(:name) }
before do
@@ -51,9 +51,8 @@ RSpec.describe 'ThemeKit.gitlab-ci.yml' do
end
it 'has no jobs' do
- expect { pipeline }.to raise_error(
- Ci::CreatePipelineService::CreateError, 'No stages / jobs for this pipeline.'
- )
+ expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/collection/item_spec.rb b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
index 9443bf6d6d5..f7c6f7f51df 100644
--- a/spec/lib/gitlab/ci/variables/collection/item_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
@@ -197,11 +197,11 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
end
end
- describe '#raw' do
+ describe '#raw?' do
it 'returns false when :raw is not specified' do
item = described_class.new(**variable)
- expect(item.raw).to eq false
+ expect(item.raw?).to eq false
end
context 'when :raw is specified as true' do
@@ -212,7 +212,7 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
it 'returns true' do
item = described_class.new(**variable)
- expect(item.raw).to eq true
+ expect(item.raw?).to eq true
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb
index 7d4a1eef70b..10b8f0065d9 100644
--- a/spec/lib/gitlab/ci/variables/collection_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection_spec.rb
@@ -300,7 +300,6 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
Gitlab::Ci::Variables::Collection.new
.append(key: 'CI_JOB_NAME', value: 'test-1')
.append(key: 'CI_BUILD_ID', value: '1')
- .append(key: 'RAW_VAR', value: '$TEST1', raw: true)
.append(key: 'TEST1', value: 'test-3')
.append(key: 'FILEVAR1', value: 'file value 1', file: true)
end
@@ -322,10 +321,6 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
value: 'key${TEST1}-${CI_JOB_NAME}',
result: 'keytest-3-test-1'
},
- "complex expansions with raw variable": {
- value: 'key${RAW_VAR}-${CI_JOB_NAME}',
- result: 'key$TEST1-test-1'
- },
"missing variable not keeping original": {
value: 'key${MISSING_VAR}-${CI_JOB_NAME}',
result: 'key-test-1'
@@ -339,22 +334,22 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
value: 'key-$TEST1-%%HOME%%-$${HOME}',
result: 'key-test-3-%%HOME%%-$${HOME}'
},
- "file variable with expand_file_vars: true": {
+ "file variable with expand_file_refs: true": {
value: 'key-$FILEVAR1-$TEST1',
result: 'key-file value 1-test-3'
},
- "file variable with expand_file_vars: false": {
+ "file variable with expand_file_refs: false": {
value: 'key-$FILEVAR1-$TEST1',
result: 'key-$FILEVAR1-test-3',
- expand_file_vars: false
+ expand_file_refs: false
}
}
end
with_them do
- let(:options) { { keep_undefined: keep_undefined, expand_file_vars: expand_file_vars }.compact }
+ let(:options) { { keep_undefined: keep_undefined, expand_file_refs: expand_file_refs }.compact }
- subject(:result) { collection.expand_value(value, **options) }
+ subject(:expanded_result) { collection.expand_value(value, **options) }
it 'matches expected expansion' do
is_expected.to eq(result)
@@ -509,17 +504,35 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
{ key: 'variable4', value: 'keyvalue${variable2}value3' }
]
},
- "complex expansions with raw variable": {
+ "complex expansions with raw variable with expand_raw_refs: true (default)": {
+ variables: [
+ { key: 'variable1', value: 'value1' },
+ { key: 'raw_var', value: 'raw-$variable1', raw: true },
+ { key: 'nonraw_var', value: 'nonraw-$variable1' },
+ { key: 'variable2', value: '$raw_var and $nonraw_var' }
+ ],
+ keep_undefined: false,
+ result: [
+ { key: 'variable1', value: 'value1' },
+ { key: 'raw_var', value: 'raw-$variable1', raw: true },
+ { key: 'nonraw_var', value: 'nonraw-value1' },
+ { key: 'variable2', value: 'raw-$variable1 and nonraw-value1' }
+ ]
+ },
+ "complex expansions with raw variable with expand_raw_refs: false": {
variables: [
- { key: 'variable3', value: 'key_${variable}_${variable2}' },
- { key: 'variable', value: '$variable2', raw: true },
- { key: 'variable2', value: 'value2' }
+ { key: 'variable1', value: 'value1' },
+ { key: 'raw_var', value: 'raw-$variable1', raw: true },
+ { key: 'nonraw_var', value: 'nonraw-$variable1' },
+ { key: 'variable2', value: '$raw_var and $nonraw_var' }
],
keep_undefined: false,
+ expand_raw_refs: false,
result: [
- { key: 'variable', value: '$variable2', raw: true },
- { key: 'variable2', value: 'value2' },
- { key: 'variable3', value: 'key_$variable2_value2' }
+ { key: 'variable1', value: 'value1' },
+ { key: 'raw_var', value: 'raw-$variable1', raw: true },
+ { key: 'nonraw_var', value: 'nonraw-value1' },
+ { key: 'variable2', value: '$raw_var and nonraw-value1' }
]
},
"variable value referencing password with special characters": {
@@ -553,8 +566,9 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
with_them do
let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) }
+ let(:options) { { keep_undefined: keep_undefined, expand_raw_refs: expand_raw_refs }.compact }
- subject { collection.sort_and_expand_all(keep_undefined: keep_undefined) }
+ subject(:expanded_result) { collection.sort_and_expand_all(**options) }
it 'returns Collection' do
is_expected.to be_an_instance_of(Gitlab::Ci::Variables::Collection)
@@ -601,7 +615,8 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
it 'logs file_variable_is_referenced_in_another_variable once for VAR5' do
expect(Gitlab::AppJsonLogger).to receive(:info).with(
event: 'file_variable_is_referenced_in_another_variable',
- project_id: project.id
+ project_id: project.id,
+ variable: 'FILEVAR4'
).once
sort_and_expand_all
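For reference, a short usage sketch of the two expansion options exercised above; the keyword names (expand_file_refs for Collection#expand_value, expand_raw_refs for #sort_and_expand_all) are taken from these specs and the expected outputs are illustrative:

collection = Gitlab::Ci::Variables::Collection.new
  .append(key: 'FILEVAR', value: 'file value', file: true)
  .append(key: 'RAW_VAR', value: 'raw-$OTHER', raw: true)
  .append(key: 'OTHER', value: 'other')

# File variables stay as literal references when expand_file_refs: false.
collection.expand_value('key-$FILEVAR-$OTHER', expand_file_refs: false)
# => "key-$FILEVAR-other"

# Raw variables are never expanded themselves; with expand_raw_refs: false they
# are also left unexpanded inside other variables' values.
collection.sort_and_expand_all(expand_raw_refs: false)
# => Collection in which '$RAW_VAR' references remain literal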
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index ebf8422489e..5de813f7739 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1071,6 +1071,7 @@ module Gitlab
let(:build) { execute.builds.first }
let(:job_variables) { build[:job_variables] }
+ let(:root_variables) { execute.root_variables }
let(:root_variables_inheritance) { build[:root_variables_inheritance] }
context 'when global variables are defined' do
@@ -1193,6 +1194,78 @@ module Gitlab
expect(root_variables_inheritance).to eq(true)
end
end
+
+ context 'when variables have data other than value' do
+ let(:config) do
+ <<~YAML
+ variables:
+ VAR1: value1
+ VAR2:
+ value: value2
+ description: description2
+ VAR3:
+ value: value3
+ expand: false
+
+ rspec:
+ script: rspec
+ variables:
+ VAR4: value4
+ VAR5:
+ value: value5
+ expand: false
+ VAR6:
+ value: value6
+ expand: true
+ YAML
+ end
+
+ it 'returns variables' do
+ expect(job_variables).to contain_exactly(
+ { key: 'VAR4', value: 'value4' },
+ { key: 'VAR5', value: 'value5', raw: true },
+ { key: 'VAR6', value: 'value6', raw: false }
+ )
+
+ expect(execute.root_variables).to contain_exactly(
+ { key: 'VAR1', value: 'value1' },
+ { key: 'VAR2', value: 'value2' },
+ { key: 'VAR3', value: 'value3', raw: true }
+ )
+
+ expect(execute.root_variables_with_prefill_data).to eq(
+ 'VAR1' => { value: 'value1' },
+ 'VAR2' => { value: 'value2', description: 'description2' },
+ 'VAR3' => { value: 'value3', raw: true }
+ )
+ end
+
+ context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
+ before do
+ stub_feature_flags(ci_raw_variables_in_yaml_config: false)
+ end
+
+ it 'returns variables without description and raw' do
+ expect(job_variables).to contain_exactly(
+ { key: 'VAR4', value: 'value4' },
+ { key: 'VAR5', value: 'value5' },
+ { key: 'VAR6', value: 'value6' }
+ )
+
+ expect(execute.root_variables).to contain_exactly(
+ { key: 'VAR1', value: 'value1' },
+ { key: 'VAR2', value: 'value2' },
+ { key: 'VAR3', value: 'value3' }
+ )
+
+ expect(execute.root_variables_with_prefill_data).to eq(
+ 'VAR1' => { value: 'value1' },
+ 'VAR2' => { value: 'value2', description: 'description2' },
+ 'VAR3' => { value: 'value3' }
+ )
+ end
+ end
+ end
end
context 'when using `extends`' do
@@ -1334,7 +1407,7 @@ module Gitlab
context "when an array of wrong keyed object is provided" do
let(:include_content) { [{ yolo: "/local.gitlab-ci.yml" }] }
- it_behaves_like 'returns errors', /needs to match exactly one accessor/
+ it_behaves_like 'returns errors', /does not have a valid subkey for include/
end
context "when an array of mixed typed objects is provided" do
@@ -1359,7 +1432,7 @@ module Gitlab
context "when the include type is incorrect" do
let(:include_content) { { name: "/local.gitlab-ci.yml" } }
- it_behaves_like 'returns errors', /needs to match exactly one accessor/
+ it_behaves_like 'returns errors', /does not have a valid subkey for include/
end
end
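The new yaml_processor examples above map the `expand:` keyword of CI YAML variables onto a `raw:` attribute (`expand: false` becomes `raw: true`), gated by the ci_raw_variables_in_yaml_config feature flag. A condensed sketch with the flag assumed enabled:

config = <<~YAML
  variables:
    VAR1:
      value: value1
      expand: false   # surfaces as raw: true below

  rspec:
    script: rspec
YAML

result = Gitlab::Ci::YamlProcessor.new(config).execute
result.root_variables
# => [{ key: 'VAR1', value: 'value1', raw: true }]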
diff --git a/spec/lib/gitlab/cluster/lifecycle_events_spec.rb b/spec/lib/gitlab/cluster/lifecycle_events_spec.rb
index 5eea78acd98..45becb8370c 100644
--- a/spec/lib/gitlab/cluster/lifecycle_events_spec.rb
+++ b/spec/lib/gitlab/cluster/lifecycle_events_spec.rb
@@ -3,38 +3,55 @@
require 'spec_helper'
RSpec.describe Gitlab::Cluster::LifecycleEvents do
+ using RSpec::Parameterized::TableSyntax
+
# we create a new instance to ensure that we do not touch existing hooks
let(:replica) { Class.new(described_class) }
- context 'hooks execution' do
- using RSpec::Parameterized::TableSyntax
+ before do
+ # disable blackout period to speed up tests
+ stub_config(shutdown: { blackout_seconds: 0 })
+ end
- where(:method, :hook_names) do
- :do_worker_start | %i[worker_start_hooks]
- :do_before_fork | %i[before_fork_hooks]
- :do_before_graceful_shutdown | %i[master_blackout_period master_graceful_shutdown]
- :do_before_master_restart | %i[master_restart_hooks]
+ context 'outside of clustered environments' do
+ where(:hook, :was_executed_immediately) do
+ :on_worker_start | true
+ :on_before_fork | false
+ :on_before_graceful_shutdown | false
+ :on_before_master_restart | false
+ :on_worker_stop | false
end
- before do
- # disable blackout period to speed-up tests
- stub_config(shutdown: { blackout_seconds: 0 })
+ with_them do
+ it 'executes the given block immediately' do
+ was_executed = false
+ replica.public_send(hook, &proc { was_executed = true })
+
+ expect(was_executed).to eq(was_executed_immediately)
+ end
end
+ end
- with_them do
- subject { replica.public_send(method) }
+ context 'in clustered environments' do
+ before do
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
+ replica.set_puma_options(workers: 2)
+ end
- it 'executes all hooks' do
- hook_names.each do |hook_name|
- hook = double
- replica.instance_variable_set(:"@#{hook_name}", [hook])
+ where(:hook, :execution_helper) do
+ :on_worker_start | :do_worker_start
+ :on_before_fork | :do_before_fork
+ :on_before_graceful_shutdown | :do_before_graceful_shutdown
+ :on_before_master_restart | :do_before_master_restart
+ :on_worker_stop | :do_worker_stop
+ end
- # ensure that proper hooks are called
- expect(hook).to receive(:call)
- expect(replica).to receive(:call).with(hook_name, anything).and_call_original
- end
+ with_them do
+ it 'requires explicit execution via do_* helper' do
+ was_executed = false
+ replica.public_send(hook, &proc { was_executed = true })
- subject
+ expect { replica.public_send(execution_helper) }.to change { was_executed }.from(false).to(true)
end
end
end
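A compact sketch of the contrast the rewritten LifecycleEvents spec encodes: outside clustered Puma only on_worker_start blocks run immediately, while in clustered mode every hook is queued until its do_* helper fires (names mirror the spec; this is illustrative, not a verbatim excerpt):

it 'defers hooks until do_worker_start in clustered mode' do
  replica = Class.new(Gitlab::Cluster::LifecycleEvents)
  allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
  replica.set_puma_options(workers: 2)

  was_executed = false
  replica.on_worker_start { was_executed = true }

  expect(was_executed).to be(false)   # queued, not yet run
  replica.do_worker_start
  expect(was_executed).to be(true)    # executed by the helper
end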
diff --git a/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb b/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
new file mode 100644
index 00000000000..cb13a711857
--- /dev/null
+++ b/spec/lib/gitlab/cluster/puma_worker_killer_initializer_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'puma_worker_killer'
+
+RSpec.describe Gitlab::Cluster::PumaWorkerKillerInitializer do
+ describe '.start' do
+ context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is false' do
+ before do
+ stub_env('GITLAB_MEMORY_WATCHDOG_ENABLED', 'false')
+ end
+
+ it 'configures and starts PumaWorkerKiller' do
+ expect(PumaWorkerKiller).to receive(:config)
+ expect(PumaWorkerKiller).to receive(:start)
+
+ described_class.start({})
+ end
+ end
+
+ context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is not set' do
+ it 'does not configure or start PumaWorkerKiller' do
+ expect(PumaWorkerKiller).not_to receive(:config)
+ expect(PumaWorkerKiller).not_to receive(:start)
+
+ described_class.start({})
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
index 9af6aed2b02..963c9fe1576 100644
--- a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
+++ b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do
end
it 'reports deprecated database notice' do
- is_expected.to contain_exactly(notice_deprecated_database(old_database_version))
+ is_expected.to contain_exactly(notice_deprecated_database('main', old_database_version))
end
end
end
@@ -59,13 +59,13 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do
it 'reports deprecated database notice if the main database is using an old version' do
allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(old_database)
allow(Gitlab::Database::Reflection).to receive(:new).with(Ci::ApplicationRecord).and_return(new_database)
- is_expected.to contain_exactly(notice_deprecated_database(old_database_version))
+ is_expected.to contain_exactly(notice_deprecated_database('main', old_database_version))
end
it 'reports deprecated database notice if the ci database is using an old version' do
allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(new_database)
allow(Gitlab::Database::Reflection).to receive(:new).with(Ci::ApplicationRecord).and_return(old_database)
- is_expected.to contain_exactly(notice_deprecated_database(old_database_version))
+ is_expected.to contain_exactly(notice_deprecated_database('ci', old_database_version))
end
end
@@ -77,22 +77,23 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do
it 'reports deprecated database notice' do
is_expected.to match_array [
- notice_deprecated_database(old_database_version),
- notice_deprecated_database(old_database_version)
+ notice_deprecated_database('main', old_database_version),
+ notice_deprecated_database('ci', old_database_version)
]
end
end
end
end
- def notice_deprecated_database(database_version)
+ def notice_deprecated_database(database_name, database_version)
{
type: 'warning',
- message: _('You are using PostgreSQL %{pg_version_current}, but PostgreSQL ' \
- '%{pg_version_minimum} is required for this version of GitLab. ' \
- 'Please upgrade your environment to a supported PostgreSQL version, ' \
- 'see %{pg_requirements_url} for details.') % \
+ message: _('Database \'%{database_name}\' is using PostgreSQL %{pg_version_current}, ' \
+ 'but PostgreSQL %{pg_version_minimum} is required for this version of GitLab. ' \
+ 'Please upgrade your environment to a supported PostgreSQL version, ' \
+ 'see %{pg_requirements_url} for details.') % \
{
+ database_name: database_name,
pg_version_current: database_version,
pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION,
pg_requirements_url: Gitlab::ConfigChecker::ExternalDatabaseChecker::PG_REQUIREMENTS_LINK
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index 1fa6eee9813..165305476d2 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -3,18 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::Conflict::File do
- include GitHelpers
-
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
- let(:rugged) { rugged_repo(repository) }
- let(:their_commit) { rugged.branches['conflict-start'].target }
- let(:our_commit) { rugged.branches['conflict-resolvable'].target }
+ let(:their_commit) { TestEnv::BRANCH_SHA['conflict-start'] }
+ let(:our_commit) { TestEnv::BRANCH_SHA['conflict-resolvable'] }
let(:merge_request) { create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start', source_project: project) }
- let(:index) { rugged.merge_commits(our_commit, their_commit) }
- let(:rugged_conflict) { index.conflicts.last }
- let(:raw_conflict_content) { index.merge_file('files/ruby/regex.rb')[:data] }
- let(:raw_conflict_file) { Gitlab::Git::Conflict::File.new(repository, our_commit.oid, rugged_conflict, raw_conflict_content) }
+ let(:conflicts_client) { repository.gitaly_conflicts_client(our_commit, their_commit) }
+ let(:raw_conflict_files) { conflicts_client.list_conflict_files }
+ let(:conflict_file_name) { 'files/ruby/regex.rb' }
+ let(:raw_conflict_file) { raw_conflict_files.find { |conflict| conflict.our_path == conflict_file_name } }
let(:conflict_file) { described_class.new(raw_conflict_file, merge_request: merge_request) }
describe 'delegates' do
@@ -137,8 +134,7 @@ RSpec.describe Gitlab::Conflict::File do
end
context 'when there are unchanged trailing lines' do
- let(:rugged_conflict) { index.conflicts.first }
- let(:raw_conflict_content) { index.merge_file('files/ruby/popen.rb')[:data] }
+ let(:conflict_file_name) { 'files/ruby/popen.rb' }
it 'assigns conflict types and adds a match line to the end of the section' do
expect(diff_line_types).to eq(
@@ -294,6 +290,8 @@ RSpec.describe Gitlab::Conflict::File do
FILE
end
+ let(:conflict) { { ancestor: { path: '' }, theirs: { path: conflict_file_name }, ours: { path: conflict_file_name } } }
+ let(:raw_conflict_file) { Gitlab::Git::Conflict::File.new(repository, our_commit, conflict, raw_conflict_content) }
let(:sections) { conflict_file.sections }
it 'sets the correct match line headers' do
@@ -324,7 +322,7 @@ RSpec.describe Gitlab::Conflict::File do
describe '#as_json' do
it 'includes the blob path for the file' do
expect(conflict_file.as_json[:blob_path])
- .to eq("/#{project.full_path}/-/blob/#{our_commit.oid}/files/ruby/regex.rb")
+ .to eq("/#{project.full_path}/-/blob/#{our_commit}/files/ruby/regex.rb")
end
it 'includes the blob icon for the file' do
@@ -341,7 +339,8 @@ RSpec.describe Gitlab::Conflict::File do
describe '#conflict_type' do
using RSpec::Parameterized::TableSyntax
- let(:rugged_conflict) { { ancestor: { path: ancestor_path }, theirs: { path: their_path }, ours: { path: our_path } } }
+ let(:conflict) { { ancestor: { path: ancestor_path }, theirs: { path: their_path }, ours: { path: our_path } } }
+ let(:raw_conflict_file) { Gitlab::Git::Conflict::File.new(repository, our_commit, conflict, '') }
let(:diff_file) { double(renamed_file?: renamed_file?) }
subject(:conflict_type) { conflict_file.conflict_type(diff_file) }
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 6b1d8d8d1af..aadfb41a46e 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -53,6 +53,18 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['child_src']).to eq("#{directives['frame_src']} #{directives['worker_src']}")
end
+ describe 'the img-src directive' do
+ it 'can be loaded from anywhere' do
+ expect(directives['img_src']).to include('http: https:')
+ end
+ end
+
+ describe 'the media-src directive' do
+ it 'can be loaded from anywhere' do
+ expect(directives['media_src']).to include('http: https:')
+ end
+ end
+
context 'adds all websocket origins to support Safari' do
it 'with insecure domain' do
stub_config_setting(host: 'example.com', https: false)
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 2c239d5868a..544b210651b 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -3,10 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Build do
- let!(:tag_names) { %w(tag-1 tag-2) }
- let(:runner) { create(:ci_runner, :instance, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n) }) }
- let(:user) { create(:user, :public_email) }
- let(:build) { create(:ci_build, :running, runner: runner, user: user) }
+ let_it_be(:runner) { create(:ci_runner, :instance, :tagged_only) }
+ let_it_be(:user) { create(:user, :public_email) }
+ let_it_be(:ci_build) { create(:ci_build, :running, runner: runner, user: user) }
describe '.build' do
around do |example|
@@ -14,25 +13,26 @@ RSpec.describe Gitlab::DataBuilder::Build do
end
let(:data) do
- described_class.build(build)
+ described_class.build(ci_build)
end
it { expect(data).to be_a(Hash) }
- it { expect(data[:ref]).to eq(build.ref) }
- it { expect(data[:sha]).to eq(build.sha) }
- it { expect(data[:tag]).to eq(build.tag) }
- it { expect(data[:build_id]).to eq(build.id) }
- it { expect(data[:build_status]).to eq(build.status) }
- it { expect(data[:build_created_at]).to eq(build.created_at) }
- it { expect(data[:build_started_at]).to eq(build.started_at) }
- it { expect(data[:build_finished_at]).to eq(build.finished_at) }
- it { expect(data[:build_duration]).to eq(build.duration) }
- it { expect(data[:build_queued_duration]).to eq(build.queued_duration) }
+ it { expect(data[:ref]).to eq(ci_build.ref) }
+ it { expect(data[:sha]).to eq(ci_build.sha) }
+ it { expect(data[:tag]).to eq(ci_build.tag) }
+ it { expect(data[:build_id]).to eq(ci_build.id) }
+ it { expect(data[:build_status]).to eq(ci_build.status) }
+ it { expect(data[:build_created_at]).to eq(ci_build.created_at) }
+ it { expect(data[:build_started_at]).to eq(ci_build.started_at) }
+ it { expect(data[:build_finished_at]).to eq(ci_build.finished_at) }
+ it { expect(data[:build_duration]).to eq(ci_build.duration) }
+ it { expect(data[:build_queued_duration]).to eq(ci_build.queued_duration) }
it { expect(data[:build_allow_failure]).to eq(false) }
- it { expect(data[:build_failure_reason]).to eq(build.failure_reason) }
- it { expect(data[:project_id]).to eq(build.project.id) }
- it { expect(data[:project_name]).to eq(build.project.full_name) }
- it { expect(data[:pipeline_id]).to eq(build.pipeline.id) }
+ it { expect(data[:build_failure_reason]).to eq(ci_build.failure_reason) }
+ it { expect(data[:project_id]).to eq(ci_build.project.id) }
+ it { expect(data[:project_name]).to eq(ci_build.project.full_name) }
+ it { expect(data[:pipeline_id]).to eq(ci_build.pipeline.id) }
+ it { expect(data[:retries_count]).to eq(ci_build.retries_count) }
it {
expect(data[:user]).to eq(
@@ -45,44 +45,74 @@ RSpec.describe Gitlab::DataBuilder::Build do
})
}
- it { expect(data[:commit][:id]).to eq(build.pipeline.id) }
- it { expect(data[:runner][:id]).to eq(build.runner.id) }
- it { expect(data[:runner][:tags]).to match_array(tag_names) }
- it { expect(data[:runner][:description]).to eq(build.runner.description) }
- it { expect(data[:runner][:runner_type]).to eq(build.runner.runner_type) }
- it { expect(data[:runner][:is_shared]).to eq(build.runner.instance_type?) }
+ it { expect(data[:commit][:id]).to eq(ci_build.pipeline.id) }
+ it { expect(data[:runner][:id]).to eq(ci_build.runner.id) }
+ it { expect(data[:runner][:tags]).to match_array(%w(tag1 tag2)) }
+ it { expect(data[:runner][:description]).to eq(ci_build.runner.description) }
+ it { expect(data[:runner][:runner_type]).to eq(ci_build.runner.runner_type) }
+ it { expect(data[:runner][:is_shared]).to eq(ci_build.runner.instance_type?) }
it { expect(data[:environment]).to be_nil }
+ it 'does not exceed the expected number of queries' do
+ ci_build # Make sure the Ci::Build model is created before recording.
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ b = Ci::Build.find(ci_build.id)
+ described_class.build(b) # Don't use ci_build variable here since it has all associations loaded into memory
+ end
+
+ expect(control.count).to eq(13)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(job_webhook_retries_count: false)
+ end
+
+ it { expect(data).not_to have_key(:retries_count) }
+
+ it 'does not exceed the expected number of queries' do
+ ci_build # Make sure the Ci::Build model is created before recording.
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ b = Ci::Build.find(ci_build.id)
+ described_class.build(b) # Don't use ci_build variable here since it has all associations loaded into memory
+ end
+
+ expect(control.count).to eq(12)
+ end
+ end
+
context 'commit author_url' do
context 'when no commit present' do
- let(:build) { create(:ci_build) }
+ let(:build) { build(:ci_build) }
it 'sets to mailing address of git_author_email' do
- expect(data[:commit][:author_url]).to eq("mailto:#{build.pipeline.git_author_email}")
+ expect(data[:commit][:author_url]).to eq("mailto:#{ci_build.pipeline.git_author_email}")
end
end
context 'when commit present but has no author' do
- let(:build) { create(:ci_build, :with_commit) }
+ let(:ci_build) { build(:ci_build, :with_commit) }
it 'sets to mailing address of git_author_email' do
- expect(data[:commit][:author_url]).to eq("mailto:#{build.pipeline.git_author_email}")
+ expect(data[:commit][:author_url]).to eq("mailto:#{ci_build.pipeline.git_author_email}")
end
end
context 'when commit and author are present' do
- let(:build) { create(:ci_build, :with_commit_and_author) }
+ let(:ci_build) { build(:ci_build, :with_commit_and_author) }
it 'sets to GitLab user url' do
- expect(data[:commit][:author_url]).to eq(Gitlab::Routing.url_helpers.user_url(username: build.commit.author.username))
+ expect(data[:commit][:author_url]).to eq(Gitlab::Routing.url_helpers.user_url(username: ci_build.commit.author.username))
end
end
context 'with environment' do
- let(:build) { create(:ci_build, :teardown_environment) }
+ let(:ci_build) { build(:ci_build, :teardown_environment) }
- it { expect(data[:environment][:name]).to eq(build.expanded_environment_name) }
- it { expect(data[:environment][:action]).to eq(build.environment_action) }
+ it { expect(data[:environment][:name]).to eq(ci_build.expanded_environment_name) }
+ it { expect(data[:environment][:action]).to eq(ci_build.environment_action) }
end
end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index 46a12d8c6f6..eb348f5b497 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -103,6 +103,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(merge_request_attrs[:target_project_id]).to eq(merge_request.target_project_id)
expect(merge_request_attrs[:state]).to eq(merge_request.state)
expect(merge_request_attrs[:merge_status]).to eq(merge_request.public_merge_status)
+ expect(merge_request_attrs[:detailed_merge_status]).to eq("mergeable")
expect(merge_request_attrs[:url]).to eq("http://localhost/#{merge_request.target_project.full_path}/-/merge_requests/#{merge_request.iid}")
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 32746a46308..cc9f3d5b7f1 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -7,7 +7,15 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
it { is_expected.to be_a Gitlab::Database::SharedModel }
- it { expect(described_class::TIMEOUT_EXCEPTIONS).to match_array [ActiveRecord::StatementTimeout, ActiveRecord::ConnectionTimeoutError, ActiveRecord::AdapterTimeout, ActiveRecord::LockWaitTimeout] }
+ specify do
+ expect(described_class::TIMEOUT_EXCEPTIONS).to contain_exactly(
+ ActiveRecord::StatementTimeout,
+ ActiveRecord::ConnectionTimeoutError,
+ ActiveRecord::AdapterTimeout,
+ ActiveRecord::LockWaitTimeout,
+ ActiveRecord::QueryCanceled
+ )
+ end
describe 'associations' do
it { is_expected.to belong_to(:batched_migration).with_foreign_key(:batched_background_migration_id) }
@@ -272,7 +280,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
context 'when is a timeout exception' do
let(:exception) { ActiveRecord::StatementTimeout.new }
- it { expect(subject).to be_truthy }
+ it { expect(subject).to be_truthy }
+ end
+
+ context 'when is a QueryCanceled exception' do
+ let(:exception) { ActiveRecord::QueryCanceled.new }
+
+ it { expect(subject).to be_truthy }
end
context 'when is not a timeout exception' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 1ac9cbae036..31ae5e9b55d 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -211,6 +211,102 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(active_migration).to eq(migration3)
end
end
+
+ context 'when there are no active migrations available' do
+ it 'returns nil' do
+ expect(active_migration).to eq(nil)
+ end
+ end
+ end
+
+ describe '.find_executable' do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+ let(:migration_id) { migration.id }
+
+ subject(:executable_migration) { described_class.find_executable(migration_id, connection: connection) }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ context 'when the migration does not exist' do
+ let(:migration_id) { non_existing_record_id }
+
+ it 'returns nil' do
+ expect(executable_migration).to be_nil
+ end
+ end
+
+ context 'when the migration is not active' do
+ let!(:migration) { create(:batched_background_migration, :finished) }
+
+ it 'returns nil' do
+ expect(executable_migration).to be_nil
+ end
+ end
+
+ context 'when the migration is on hold' do
+ let!(:migration) { create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now) }
+
+ it 'returns nil' do
+ expect(executable_migration).to be_nil
+ end
+ end
+
+ context 'when the migration is not available for the current connection' do
+ let!(:migration) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_not_existing) }
+
+ it 'returns nil' do
+ expect(executable_migration).to be_nil
+ end
+ end
+
+ context 'when the migration exists and is executable' do
+ let!(:migration) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_main) }
+
+ it 'returns the migration' do
+ expect(executable_migration).to eq(migration)
+ end
+ end
+ end
+
+ describe '.active_migrations_distinct_on_table' do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ it 'returns one pending executable migration per table' do
+ # non-active migration
+ create(:batched_background_migration, :finished)
+ # migration put on hold
+ create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now)
+ # migration not available for the current connection
+ create(:batched_background_migration, :active, gitlab_schema: :gitlab_not_existing)
+ # active migration that is no longer on hold
+ migration_1 = create(:batched_background_migration, :active, table_name: :users, on_hold_until: 10.minutes.ago)
+ # another active migration for the same table
+ create(:batched_background_migration, :active, table_name: :users)
+ # active migration for different table
+ migration_2 = create(:batched_background_migration, :active, table_name: :projects)
+ # active migration for third table
+ create(:batched_background_migration, :active, table_name: :namespaces)
+
+ actual = described_class.active_migrations_distinct_on_table(connection: connection, limit: 2)
+
+ expect(actual).to eq([migration_1, migration_2])
+ end
+
+ it 'returns an empty collection when there are no pending executable migrations' do
+ actual = described_class.active_migrations_distinct_on_table(connection: connection, limit: 2)
+
+ expect(actual).to be_empty
+ end
end
describe '.created_after' do
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index a87b0c1a3a8..852cc719d01 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -330,7 +330,7 @@ RSpec.describe Gitlab::Database::BatchCount do
end
it 'counts with "id" field' do
- expect(described_class.batch_distinct_count(model, "#{column}")).to eq(2)
+ expect(described_class.batch_distinct_count(model, column.to_s)).to eq(2)
end
it 'counts with table.column field' do
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
index 34370c9a21f..7dc2e0be3e5 100644
--- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
record_type: 'A',
interval: 60,
disconnect_timeout: 120,
- use_tcp: false
+ use_tcp: false,
+ max_replica_pools: nil
)
expect(config.pool_size).to eq(Gitlab::Database.default_pool_size)
end
@@ -39,7 +40,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
replica_check_interval: 3,
hosts: %w[foo bar],
discover: {
- 'record' => 'foo.example.com'
+ record: 'foo.example.com',
+ max_replica_pools: 5
}
}
}
@@ -59,7 +61,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
record_type: 'A',
interval: 60,
disconnect_timeout: 120,
- use_tcp: false
+ use_tcp: false,
+ max_replica_pools: 5
)
expect(config.pool_size).to eq(4)
end
@@ -95,7 +98,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
record_type: 'A',
interval: 60,
disconnect_timeout: 120,
- use_tcp: false
+ use_tcp: false,
+ max_replica_pools: nil
)
expect(config.pool_size).to eq(4)
end
diff --git a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
index 41312dbedd6..a2076f5b950 100644
--- a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
end
Gitlab::Database::LoadBalancing::ConnectionProxy::NON_STICKY_READS.each do |name|
- describe "#{name}" do
+ describe name.to_s do
it 'runs the query on the replica' do
expect(proxy).to receive(:read_using_load_balancer)
.with(name, 'foo')
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
end
Gitlab::Database::LoadBalancing::ConnectionProxy::STICKY_WRITES.each do |name|
- describe "#{name}" do
+ describe name.to_s do
it 'runs the query on the primary and sticks to it' do
session = Gitlab::Database::LoadBalancing::Session.new
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery/sampler_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery/sampler_spec.rb
new file mode 100644
index 00000000000..1a49aa2871f
--- /dev/null
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery/sampler_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Database::LoadBalancing::ServiceDiscovery::Sampler do
+ let(:sampler) { described_class.new(max_replica_pools: max_replica_pools, seed: 100) }
+ let(:max_replica_pools) { 3 }
+ let(:address_class) { ::Gitlab::Database::LoadBalancing::ServiceDiscovery::Address }
+ let(:addresses) do
+ [
+ address_class.new("127.0.0.1", 6432),
+ address_class.new("127.0.0.1", 6433),
+ address_class.new("127.0.0.1", 6434),
+ address_class.new("127.0.0.1", 6435),
+ address_class.new("127.0.0.2", 6432),
+ address_class.new("127.0.0.2", 6433),
+ address_class.new("127.0.0.2", 6434),
+ address_class.new("127.0.0.2", 6435)
+ ]
+ end
+
+ describe '#sample' do
+ it 'samples max_replica_pools addresses' do
+ expect(sampler.sample(addresses).count).to eq(max_replica_pools)
+ end
+
+ it 'samples random ports across all hosts' do
+ expect(sampler.sample(addresses)).to eq([
+ address_class.new("127.0.0.1", 6432),
+ address_class.new("127.0.0.2", 6435),
+ address_class.new("127.0.0.1", 6435)
+ ])
+ end
+
+ it 'returns the same answer for the same input when called multiple times' do
+ result = sampler.sample(addresses)
+ expect(sampler.sample(addresses)).to eq(result)
+ expect(sampler.sample(addresses)).to eq(result)
+ end
+
+ it 'gives a consistent answer regardless of input ordering' do
+ expect(sampler.sample(addresses.reverse)).to eq(sampler.sample(addresses))
+ end
+
+ it 'samples fairly across all hosts' do
+ # Choose a bunch of different seeds to prove that it always chooses 2
+ # different ports from each host when selecting 4
+ (1..10).each do |seed|
+ sampler = described_class.new(max_replica_pools: 4, seed: seed)
+
+ result = sampler.sample(addresses)
+
+ expect(result.count { |r| r.hostname == "127.0.0.1" }).to eq(2)
+ expect(result.count { |r| r.hostname == "127.0.0.2" }).to eq(2)
+ end
+ end
+
+ context 'when input is an empty array' do
+ it 'returns an empty array' do
+ expect(sampler.sample([])).to eq([])
+ end
+ end
+
+ context 'when there are less replicas than max_replica_pools' do
+ let(:max_replica_pools) { 100 }
+
+ it 'returns the same addresses' do
+ expect(sampler.sample(addresses)).to eq(addresses)
+ end
+ end
+
+ context 'when max_replica_pools is nil' do
+ let(:max_replica_pools) { nil }
+
+ it 'returns the same addresses' do
+ expect(sampler.sample(addresses)).to eq(addresses)
+ end
+ end
+ end
+end
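The new sampler spec pins down three properties: a seeded sampler returns the same subset every time, the result does not depend on input ordering, and picks are spread across hostnames. A toy sampler with those properties, offered only as a sketch of the contract (it is not the class under test and assumes address objects respond to hostname and port):

class ToySampler
  def initialize(max_replica_pools:, seed:)
    @max_replica_pools = max_replica_pools
    @seed = seed
  end

  def sample(addresses)
    return addresses if @max_replica_pools.nil? || addresses.count <= @max_replica_pools

    # Sort first so grouping is independent of input order, then shuffle each
    # host's addresses with the fixed seed so results are repeatable.
    groups = addresses.sort_by { |a| [a.hostname, a.port] }
                      .group_by(&:hostname)
                      .values
                      .map { |addrs| addrs.shuffle(random: Random.new(@seed)) }

    # Interleave hosts: take one address from every host before any host repeats,
    # which is what keeps the selection fair across hosts.
    max_len = groups.map(&:length).max
    picks = (0...max_len).flat_map { |i| groups.map { |g| g[i] } }.compact

    picks.first(@max_replica_pools)
  end
end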
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
index f05910e5123..984d60e9962 100644
--- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
@@ -231,10 +231,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
nameserver: 'localhost',
port: 8600,
record: 'foo',
- record_type: record_type
+ record_type: record_type,
+ max_replica_pools: max_replica_pools
)
end
+ let(:max_replica_pools) { nil }
+
let(:packet) { double(:packet, answer: [res1, res2]) }
before do
@@ -266,24 +269,51 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
let(:res1) { double(:resource, host: 'foo1.service.consul.', port: 5432, weight: 1, priority: 1, ttl: 90) }
let(:res2) { double(:resource, host: 'foo2.service.consul.', port: 5433, weight: 1, priority: 1, ttl: 90) }
let(:res3) { double(:resource, host: 'foo3.service.consul.', port: 5434, weight: 1, priority: 1, ttl: 90) }
- let(:packet) { double(:packet, answer: [res1, res2, res3], additional: []) }
+ let(:res4) { double(:resource, host: 'foo4.service.consul.', port: 5432, weight: 1, priority: 1, ttl: 90) }
+ let(:packet) { double(:packet, answer: [res1, res2, res3, res4], additional: []) }
before do
expect_next_instance_of(Gitlab::Database::LoadBalancing::SrvResolver) do |resolver|
allow(resolver).to receive(:address_for).with('foo1.service.consul.').and_return(IPAddr.new('255.255.255.0'))
allow(resolver).to receive(:address_for).with('foo2.service.consul.').and_return(IPAddr.new('127.0.0.1'))
allow(resolver).to receive(:address_for).with('foo3.service.consul.').and_return(nil)
+ allow(resolver).to receive(:address_for).with('foo4.service.consul.').and_return("127.0.0.2")
end
end
it 'returns a TTL and ordered list of hosts' do
addresses = [
described_class::Address.new('127.0.0.1', 5433),
+ described_class::Address.new('127.0.0.2', 5432),
described_class::Address.new('255.255.255.0', 5432)
]
expect(service.addresses_from_dns).to eq([90, addresses])
end
+
+ context 'when max_replica_pools is set' do
+ context 'when the number of addresses exceeds max_replica_pools' do
+ let(:max_replica_pools) { 2 }
+
+ it 'limits to max_replica_pools' do
+ expect(service.addresses_from_dns[1].count).to eq(2)
+ end
+ end
+
+ context 'when the number of addresses is less than max_replica_pools' do
+ let(:max_replica_pools) { 5 }
+
+ it 'returns all addresses' do
+ addresses = [
+ described_class::Address.new('127.0.0.1', 5433),
+ described_class::Address.new('127.0.0.2', 5432),
+ described_class::Address.new('255.255.255.0', 5432)
+ ]
+
+ expect(service.addresses_from_dns).to eq([90, addresses])
+ end
+ end
+ end
end
context 'when the resolver returns an empty response' do
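For reference, the filtering and ordering asserted above, reduced to plain Ruby (the Struct and hashes below are stand-ins for the resolver plumbing, not the production code): hosts whose address lookup returns nil are dropped, and the surviving addresses are sorted so the returned list is stable.

Address = Struct.new(:hostname, :port)

resolved = {
  'foo1.service.consul.' => '255.255.255.0',
  'foo2.service.consul.' => '127.0.0.1',
  'foo3.service.consul.' => nil,        # unresolvable hosts are skipped
  'foo4.service.consul.' => '127.0.0.2'
}

ports = {
  'foo1.service.consul.' => 5432,
  'foo2.service.consul.' => 5433,
  'foo3.service.consul.' => 5434,
  'foo4.service.consul.' => 5432
}

addresses = resolved.filter_map { |host, ip| Address.new(ip, ports[host]) if ip }
                    .sort_by { |a| [a.hostname, a.port] }
# => 127.0.0.1:5433, 127.0.0.2:5432, 255.255.255.0:5432, the ordering the spec expects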
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index 88007de53d3..61b63016f1a 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -358,7 +358,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
end
def process_job(job)
- Sidekiq::JobRetry.new.local(worker_class, job.to_json, 'default') do
+ Sidekiq::JobRetry.new(Sidekiq).local(worker_class, job.to_json, 'default') do
worker_class.process_job(job)
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
index 6026d979bcf..1eb077fe6ca 100644
--- a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
@@ -4,18 +4,18 @@ require 'spec_helper'
RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis, :delete do
include StubENV
- let(:model) { ApplicationRecord }
+ let(:model) { ActiveRecord::Base }
let(:db_host) { model.connection_pool.db_config.host }
let(:test_table_name) { '_test_foo' }
before do
# Patch in our load balancer config, simply pointing at the test database twice
- allow(Gitlab::Database::LoadBalancing::Configuration).to receive(:for_model) do |base_model|
+ allow(Gitlab::Database::LoadBalancing::Configuration).to receive(:for_model).with(model) do |base_model|
Gitlab::Database::LoadBalancing::Configuration.new(base_model, [db_host, db_host])
end
- Gitlab::Database::LoadBalancing::Setup.new(ApplicationRecord).setup
+ Gitlab::Database::LoadBalancing::Setup.new(model).setup
model.connection.execute(<<~SQL)
CREATE TABLE IF NOT EXISTS #{test_table_name} (id SERIAL PRIMARY KEY, value INTEGER)
@@ -30,6 +30,10 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
model.connection.execute(<<~SQL)
DROP TABLE IF EXISTS #{test_table_name}
SQL
+
+ # reset load balancing to original state
+ allow(Gitlab::Database::LoadBalancing::Configuration).to receive(:for_model).and_call_original
+ Gitlab::Database::LoadBalancing::Setup.new(model).setup
end
def execute(conn)
@@ -56,6 +60,7 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
conn = model.connection
expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
+ expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :read_write_retry))
conn.transaction do
expect(conn).to be_transaction_open
@@ -74,6 +79,8 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .to receive(:warn).with(hash_including(event: :read_write_retry))
expect(conn).not_to be_transaction_open
@@ -105,6 +112,8 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
it 'retries when not in a transaction' do
expect(::Gitlab::Database::LoadBalancing::Logger)
.not_to receive(:warn).with(hash_including(event: :transaction_leak))
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .to receive(:warn).with(hash_including(event: :read_write_retry))
expect { execute(model.connection) }.not_to raise_error
end
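Two rspec-mocks idioms in the hunk above are worth isolating: constraining the stub with .with(model) so only that argument is intercepted, and re-stubbing with and_call_original in the after block so later examples see the real method again. A self-contained reduction of the argument-specific stub with a call-original fallthrough (all names here are invented for illustration):

require 'rspec/autorun'

class Lookup
  def self.for_model(model)
    "real:#{model}"
  end
end

RSpec.describe Lookup do
  before do
    # Catch-all first, so unmatched arguments keep the real behaviour...
    allow(Lookup).to receive(:for_model).and_call_original
    # ...then the argument-specific stub takes precedence for :users only.
    allow(Lookup).to receive(:for_model).with(:users).and_return('stubbed')
  end

  it 'overrides only the matching argument' do
    expect(Lookup.for_model(:users)).to eq('stubbed')
    expect(Lookup.for_model(:projects)).to eq('real:projects')
  end
end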
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index 76dfaa74ae6..1c85abac91c 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -468,9 +468,10 @@ RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validat
payload = event.payload
assert =
- if payload[:name] == 'SCHEMA'
+ case payload[:name]
+ when 'SCHEMA'
false
- elsif payload[:name] == 'SQL' # Custom query
+ when 'SQL' # Custom query
true
else
keywords = %w[_test_load_balancing_test]
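The refactor above in isolation: a chain of equality checks against the same value reads more naturally as case/when, which compares with === and therefore behaves like == for plain strings. A small example of the shape:

def classify(name)
  case name
  when 'SCHEMA' then :ignore
  when 'SQL'    then :custom_query
  else               :inspect_keywords
  end
end

classify('SQL') # => :custom_query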
diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
index 2055dc33d48..0d75094a2fd 100644
--- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
@@ -35,15 +35,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
end
end
- context 'when the existing column has a default value' do
+ context 'when the existing column has a default function' do
before do
- migration.change_column_default :_test_table, existing_column, 'default value'
+ migration.change_column_default :_test_table, existing_column, -> { 'now()' }
end
it 'raises an error' do
expect do
migration.public_send(operation, :_test_table, :original, :renamed)
- end.to raise_error("#{operation} does not currently support columns with default values")
+ end.to raise_error("#{operation} does not currently support columns with default functions")
end
end
@@ -67,6 +67,94 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
end
end
+ context 'when the existing column has a default value' do
+ before do
+ migration.change_column_default :_test_table, existing_column, 'default value'
+ end
+
+ it 'creates the renamed column, syncing existing data' do
+ existing_record_1 = model.create!(status: 0, existing_column => 'existing')
+ existing_record_2 = model.create!(status: 0)
+
+ migration.send(operation, :_test_table, :original, :renamed)
+ model.reset_column_information
+
+ expect(migration.column_exists?(:_test_table, added_column)).to eq(true)
+
+ expect(existing_record_1.reload).to have_attributes(status: 0, original: 'existing', renamed: 'existing')
+ expect(existing_record_2.reload).to have_attributes(status: 0, original: 'default value', renamed: 'default value')
+ end
+
+ it 'installs triggers to sync new data' do
+ migration.public_send(operation, :_test_table, :original, :renamed)
+ model.reset_column_information
+
+ new_record_1 = model.create!(status: 1, original: 'first')
+ new_record_2 = model.create!(status: 1, renamed: 'second')
+ new_record_3 = model.create!(status: 1)
+ new_record_4 = model.create!(status: 1)
+
+ expect(new_record_1.reload).to have_attributes(status: 1, original: 'first', renamed: 'first')
+ expect(new_record_2.reload).to have_attributes(status: 1, original: 'second', renamed: 'second')
+ expect(new_record_3.reload).to have_attributes(status: 1, original: 'default value', renamed: 'default value')
+ expect(new_record_4.reload).to have_attributes(status: 1, original: 'default value', renamed: 'default value')
+
+ new_record_1.update!(original: 'updated')
+ new_record_2.update!(renamed: nil)
+ new_record_3.update!(renamed: 'update renamed')
+ new_record_4.update!(original: 'update original')
+
+ expect(new_record_1.reload).to have_attributes(status: 1, original: 'updated', renamed: 'updated')
+ expect(new_record_2.reload).to have_attributes(status: 1, original: nil, renamed: nil)
+ expect(new_record_3.reload).to have_attributes(status: 1, original: 'update renamed', renamed: 'update renamed')
+ expect(new_record_4.reload).to have_attributes(status: 1, original: 'update original', renamed: 'update original')
+ end
+ end
+
+ context 'when the existing column has a default value that evaluates to NULL' do
+ before do
+ migration.change_column_default :_test_table, existing_column, -> { "('test' || null)" }
+ end
+
+ it 'creates the renamed column, syncing existing data' do
+ existing_record_1 = model.create!(status: 0, existing_column => 'existing')
+ existing_record_2 = model.create!(status: 0)
+
+ migration.send(operation, :_test_table, :original, :renamed)
+ model.reset_column_information
+
+ expect(migration.column_exists?(:_test_table, added_column)).to eq(true)
+
+ expect(existing_record_1.reload).to have_attributes(status: 0, original: 'existing', renamed: 'existing')
+ expect(existing_record_2.reload).to have_attributes(status: 0, original: nil, renamed: nil)
+ end
+
+ it 'installs triggers to sync new data' do
+ migration.public_send(operation, :_test_table, :original, :renamed)
+ model.reset_column_information
+
+ new_record_1 = model.create!(status: 1, original: 'first')
+ new_record_2 = model.create!(status: 1, renamed: 'second')
+ new_record_3 = model.create!(status: 1)
+ new_record_4 = model.create!(status: 1)
+
+ expect(new_record_1.reload).to have_attributes(status: 1, original: 'first', renamed: 'first')
+ expect(new_record_2.reload).to have_attributes(status: 1, original: 'second', renamed: 'second')
+ expect(new_record_3.reload).to have_attributes(status: 1, original: nil, renamed: nil)
+ expect(new_record_4.reload).to have_attributes(status: 1, original: nil, renamed: nil)
+
+ new_record_1.update!(original: 'updated')
+ new_record_2.update!(renamed: nil)
+ new_record_3.update!(renamed: 'update renamed')
+ new_record_4.update!(original: 'update original')
+
+ expect(new_record_1.reload).to have_attributes(status: 1, original: 'updated', renamed: 'updated')
+ expect(new_record_2.reload).to have_attributes(status: 1, original: nil, renamed: nil)
+ expect(new_record_3.reload).to have_attributes(status: 1, original: 'update renamed', renamed: 'update renamed')
+ expect(new_record_4.reload).to have_attributes(status: 1, original: 'update original', renamed: 'update original')
+ end
+ end
+
it 'creates the renamed column, syncing existing data' do
existing_record_1 = model.create!(status: 0, existing_column => 'existing')
existing_record_2 = model.create!(status: 0, existing_column => nil)
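Illustrative migration snippet for the distinction the new v2_spec contexts draw (table, columns and migration base class are made up; the call forms match the spec): a literal default is stored as a plain value, while a lambda is sent to the database as a function expression such as now(), which is the case the renaming helper now rejects.

class AddTestDefaults < ActiveRecord::Migration[6.1]
  def up
    change_column_default :_test_table, :original, 'default value'   # plain value
    change_column_default :_test_table, :created_on, -> { 'now()' }  # default function
  end
end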
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index bcdd5646994..65fbc8d9935 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -389,6 +389,40 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_index(:users, :foo)
end
+
+ context 'when targeting a partition table' do
+ let(:schema) { 'public' }
+ let(:name) { '_test_partition_01' }
+ let(:identifier) { "#{schema}.#{name}" }
+
+ before do
+ model.execute(<<~SQL)
+ CREATE TABLE public._test_partitioned_table (
+ id serial NOT NULL,
+ partition_id serial NOT NULL,
+ PRIMARY KEY (id, partition_id)
+ ) PARTITION BY LIST(partition_id);
+
+ CREATE TABLE #{identifier} PARTITION OF public._test_partitioned_table
+ FOR VALUES IN (1);
+ SQL
+ end
+
+ context 'when allow_partition is true' do
+ it 'creates the index concurrently' do
+ expect(model).to receive(:add_index).with(:_test_partition_01, :foo, algorithm: :concurrently)
+
+ model.add_concurrent_index(:_test_partition_01, :foo, allow_partition: true)
+ end
+ end
+
+ context 'when allow_partition is not provided' do
+ it 'raises ArgumentError' do
+ expect { model.add_concurrent_index(:_test_partition_01, :foo) }
+ .to raise_error(ArgumentError, /use add_concurrent_partitioned_index/)
+ end
+ end
+ end
end
context 'inside a transaction' do
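Reading the new contexts above from the migration author's side: indexing a partition directly now requires an explicit opt-in, otherwise the helper raises and points at the partitioned-table helpers. A sketch of the opt-in call (the keyword comes from the spec; the surrounding migration scaffolding is illustrative):

disable_ddl_transaction!

def up
  # Without allow_partition: true this raises ArgumentError and suggests
  # add_concurrent_partitioned_index on the parent table instead.
  add_concurrent_index :_test_partition_01, :partition_id, allow_partition: true
end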
@@ -435,6 +469,37 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.remove_concurrent_index(:users, :foo)
end
+ context 'when targeting a partition table' do
+ let(:schema) { 'public' }
+ let(:partition_table_name) { '_test_partition_01' }
+ let(:identifier) { "#{schema}.#{partition_table_name}" }
+ let(:index_name) { '_test_partitioned_index' }
+ let(:partition_index_name) { '_test_partition_01_partition_id_idx' }
+ let(:column_name) { 'partition_id' }
+
+ before do
+ model.execute(<<~SQL)
+ CREATE TABLE public._test_partitioned_table (
+ id serial NOT NULL,
+ partition_id serial NOT NULL,
+ PRIMARY KEY (id, partition_id)
+ ) PARTITION BY LIST(partition_id);
+
+ CREATE INDEX #{index_name} ON public._test_partitioned_table(#{column_name});
+
+ CREATE TABLE #{identifier} PARTITION OF public._test_partitioned_table
+ FOR VALUES IN (1);
+ SQL
+ end
+
+ context 'when dropping an index on the partition table' do
+ it 'raises ArgumentError' do
+ expect { model.remove_concurrent_index(partition_table_name, column_name) }
+ .to raise_error(ArgumentError, /use remove_concurrent_partitioned_index_by_name/)
+ end
+ end
+ end
+
describe 'by index name' do
before do
allow(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(true)
@@ -476,6 +541,36 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.remove_concurrent_index_by_name(:users, "index_x_by_y")
end
+
+ context 'when targeting a partition table' do
+ let(:schema) { 'public' }
+ let(:partition_table_name) { '_test_partition_01' }
+ let(:identifier) { "#{schema}.#{partition_table_name}" }
+ let(:index_name) { '_test_partitioned_index' }
+ let(:partition_index_name) { '_test_partition_01_partition_id_idx' }
+
+ before do
+ model.execute(<<~SQL)
+ CREATE TABLE public._test_partitioned_table (
+ id serial NOT NULL,
+ partition_id serial NOT NULL,
+ PRIMARY KEY (id, partition_id)
+ ) PARTITION BY LIST(partition_id);
+
+ CREATE INDEX #{index_name} ON public._test_partitioned_table(partition_id);
+
+ CREATE TABLE #{identifier} PARTITION OF public._test_partitioned_table
+ FOR VALUES IN (1);
+ SQL
+ end
+
+ context 'when dropping an index on the partition table' do
+ it 'raises ArgumentError' do
+ expect { model.remove_concurrent_index_by_name(partition_table_name, partition_index_name) }
+ .to raise_error(ArgumentError, /use remove_concurrent_partitioned_index_by_name/)
+ end
+ end
+ end
end
end
end
@@ -1006,88 +1101,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#disable_statement_timeout' do
- it 'disables statement timeouts to current transaction only' do
- expect(model).to receive(:execute).with('SET LOCAL statement_timeout TO 0')
-
- model.disable_statement_timeout
- end
-
- # this specs runs without an enclosing transaction (:delete truncation method for db_cleaner)
- context 'with real environment', :delete do
- before do
- model.execute("SET statement_timeout TO '20000'")
- end
-
- after do
- model.execute('RESET statement_timeout')
- end
-
- it 'defines statement to 0 only for current transaction' do
- expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
-
- model.connection.transaction do
- model.disable_statement_timeout
- expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
- end
-
- expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
- end
-
- context 'when passing a blocks' do
- it 'disables statement timeouts on session level and executes the block' do
- expect(model).to receive(:execute).with('SET statement_timeout TO 0')
- expect(model).to receive(:execute).with('RESET statement_timeout').at_least(:once)
-
- expect { |block| model.disable_statement_timeout(&block) }.to yield_control
- end
-
- # this specs runs without an enclosing transaction (:delete truncation method for db_cleaner)
- context 'with real environment', :delete do
- before do
- model.execute("SET statement_timeout TO '20000'")
- end
-
- after do
- model.execute('RESET statement_timeout')
- end
-
- it 'defines statement to 0 for any code run inside the block' do
- expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
-
- model.disable_statement_timeout do
- model.connection.transaction do
- expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
- end
-
- expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
- end
- end
- end
- end
- end
-
- # This spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
- context 'when the statement_timeout is already disabled', :delete do
- before do
- ActiveRecord::Migration.connection.execute('SET statement_timeout TO 0')
- end
-
- after do
- # Use ActiveRecord::Migration.connection instead of model.execute
- # so that this call is not counted below
- ActiveRecord::Migration.connection.execute('RESET statement_timeout')
- end
-
- it 'yields control without disabling the timeout or resetting' do
- expect(model).not_to receive(:execute).with('SET statement_timeout TO 0')
- expect(model).not_to receive(:execute).with('RESET statement_timeout')
-
- expect { |block| model.disable_statement_timeout(&block) }.to yield_control
- end
- end
- end
-
describe '#true_value' do
it 'returns the appropriate value' do
expect(model.true_value).to eq("'t'")
@@ -2006,8 +2019,116 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ let(:same_queue_different_worker) do
+ Class.new do
+ include Sidekiq::Worker
+
+ sidekiq_options queue: 'test'
+
+ def self.name
+ 'SameQueueDifferentWorkerClass'
+ end
+ end
+ end
+
+ let(:unrelated_worker) do
+ Class.new do
+ include Sidekiq::Worker
+
+ sidekiq_options queue: 'unrelated'
+
+ def self.name
+ 'UnrelatedWorkerClass'
+ end
+ end
+ end
+
before do
stub_const(worker.name, worker)
+ stub_const(unrelated_worker.name, unrelated_worker)
+ stub_const(same_queue_different_worker.name, same_queue_different_worker)
+ end
+
+ describe '#sidekiq_remove_jobs', :clean_gitlab_redis_queues do
+ def clear_queues
+ Sidekiq::Queue.new('test').clear
+ Sidekiq::Queue.new('unrelated').clear
+ Sidekiq::RetrySet.new.clear
+ Sidekiq::ScheduledSet.new.clear
+ end
+
+ around do |example|
+ clear_queues
+ Sidekiq::Testing.disable!(&example)
+ clear_queues
+ end
+
+ it "removes all related job instances from the job class's queue" do
+ worker.perform_async
+ same_queue_different_worker.perform_async
+ unrelated_worker.perform_async
+
+ queue_we_care_about = Sidekiq::Queue.new(worker.queue)
+ unrelated_queue = Sidekiq::Queue.new(unrelated_worker.queue)
+
+ expect(queue_we_care_about.size).to eq(2)
+ expect(unrelated_queue.size).to eq(1)
+
+ model.sidekiq_remove_jobs(job_klass: worker)
+
+ expect(queue_we_care_about.size).to eq(1)
+ expect(queue_we_care_about.map(&:klass)).not_to include(worker.name)
+ expect(queue_we_care_about.map(&:klass)).to include(
+ same_queue_different_worker.name
+ )
+ expect(unrelated_queue.size).to eq(1)
+ end
+
+ context 'when job instances are in the scheduled set' do
+ it 'removes all related job instances from the scheduled set' do
+ worker.perform_in(1.hour)
+ unrelated_worker.perform_in(1.hour)
+
+ scheduled = Sidekiq::ScheduledSet.new
+
+ expect(scheduled.size).to eq(2)
+ expect(scheduled.map(&:klass)).to include(
+ worker.name,
+ unrelated_worker.name
+ )
+
+ model.sidekiq_remove_jobs(job_klass: worker)
+
+ expect(scheduled.size).to eq(1)
+ expect(scheduled.map(&:klass)).not_to include(worker.name)
+ expect(scheduled.map(&:klass)).to include(unrelated_worker.name)
+ end
+ end
+
+ context 'when job instances are in the retry set' do
+ include_context 'when handling retried jobs'
+
+ it 'removes all related job instances from the retry set' do
+ retry_in(worker, 1.hour)
+ retry_in(worker, 2.hours)
+ retry_in(worker, 3.hours)
+ retry_in(unrelated_worker, 4.hours)
+
+ retries = Sidekiq::RetrySet.new
+
+ expect(retries.size).to eq(4)
+ expect(retries.map(&:klass)).to include(
+ worker.name,
+ unrelated_worker.name
+ )
+
+ model.sidekiq_remove_jobs(job_klass: worker)
+
+ expect(retries.size).to eq(1)
+ expect(retries.map(&:klass)).not_to include(worker.name)
+ expect(retries.map(&:klass)).to include(unrelated_worker.name)
+ end
+ end
end
describe '#sidekiq_queue_length' do
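A sketch of the ground the new #sidekiq_remove_jobs specs cover, written against Sidekiq's public API rather than the GitLab helper itself: drop one worker's jobs from its live queue and from the scheduled and retry sets, while leaving other workers on the same queue untouched.

require 'sidekiq/api'

def purge_jobs_for(worker_class)
  queue_name = worker_class.get_sidekiq_options['queue']

  # Jobs waiting in the worker's queue.
  Sidekiq::Queue.new(queue_name).each do |job|
    job.delete if job.klass == worker_class.name
  end

  # Jobs parked in the scheduled and retry sets.
  [Sidekiq::ScheduledSet.new, Sidekiq::RetrySet.new].each do |set|
    set.each { |job| job.delete if job.klass == worker_class.name }
  end
end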
@@ -2031,7 +2152,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#migrate_sidekiq_queue' do
+ describe '#sidekiq_queue_migrate' do
it 'migrates jobs from one sidekiq queue to another' do
Sidekiq::Testing.disable! do
worker.perform_async('Something', [1])
@@ -2071,6 +2192,110 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#convert_to_type_column' do
+ it 'returns the name of the temporary column used to convert to bigint' do
+ expect(model.convert_to_type_column(:id, :int, :bigint)).to eq('id_convert_int_to_bigint')
+ end
+
+ it 'returns the name of the temporary column used to convert to uuid' do
+ expect(model.convert_to_type_column(:uuid, :string, :uuid)).to eq('uuid_convert_string_to_uuid')
+ end
+ end
+
+ describe '#create_temporary_columns_and_triggers' do
+ let(:table) { :test_table }
+ let(:column) { :id }
+ let(:mappings) do
+ {
+ id: {
+ from_type: :int,
+ to_type: :bigint
+ }
+ }
+ end
+
+ let(:old_bigint_column_naming) { false }
+
+ subject do
+ model.create_temporary_columns_and_triggers(
+ table,
+ mappings,
+ old_bigint_column_naming: old_bigint_column_naming
+ )
+ end
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer :id, primary_key: true
+ t.integer :non_nullable_column, null: false
+ t.integer :nullable_column
+ t.timestamps
+ end
+ end
+
+ context 'when no mappings are provided' do
+ let(:mappings) { nil }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error("No mappings for column conversion provided")
+ end
+ end
+
+ context 'when any of the mappings does not have the required keys' do
+ let(:mappings) do
+ {
+ id: {
+ from_type: :int
+ }
+ }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error("Some mappings don't have required keys provided")
+ end
+ end
+
+ context 'when the target table does not exist' do
+ it 'raises an error' do
+ expect { model.create_temporary_columns_and_triggers(:non_existent_table, mappings) }.to raise_error("Table non_existent_table does not exist")
+ end
+ end
+
+ context 'when the column to migrate does not exist' do
+ let(:missing_column) { :test }
+ let(:mappings) do
+ {
+ missing_column => {
+ from_type: :int,
+ to_type: :bigint
+ }
+ }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error("Column #{missing_column} does not exist on #{table}")
+ end
+ end
+
+ context 'when old_bigint_column_naming is true' do
+ let(:old_bigint_column_naming) { true }
+
+ it 'calls convert_to_bigint_column' do
+ expect(model).to receive(:convert_to_bigint_column).with(:id).and_return("id_convert_to_bigint")
+
+ subject
+ end
+ end
+
+ context 'when old_bigint_column_naming is false' do
+ it 'calls convert_to_type_column' do
+ expect(model).to receive(:convert_to_type_column).with(:id, :int, :bigint).and_return("id_convert_to_bigint")
+
+ subject
+ end
+ end
+ end
+
describe '#initialize_conversion_of_integer_to_bigint' do
let(:table) { :test_table }
let(:column) { :id }
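The naming rule the #convert_to_type_column examples pin down, spelled out as a plain helper (a sketch only; the real method lives in the migration helpers):

def convert_to_type_column(column, from, to)
  "#{column}_convert_#{from}_to_#{to}"
end

convert_to_type_column(:id, :int, :bigint)    # => "id_convert_int_to_bigint"
convert_to_type_column(:uuid, :string, :uuid) # => "uuid_convert_string_to_uuid"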
@@ -2227,7 +2452,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:columns) { :id }
it 'removes column, trigger, and function' do
- temporary_column = model.convert_to_bigint_column(:id)
+ temporary_column = model.convert_to_bigint_column(columns)
trigger_name = model.rename_trigger_name(table, :id, temporary_column)
model.revert_initialize_conversion_of_integer_to_bigint(table, columns)
@@ -2420,101 +2645,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#ensure_batched_background_migration_is_finished' do
- let(:job_class_name) { 'CopyColumnUsingBackgroundMigrationJob' }
- let(:table) { :events }
- let(:column_name) { :id }
- let(:job_arguments) { [["id"], ["id_convert_to_bigint"], nil] }
-
- let(:configuration) do
- {
- job_class_name: job_class_name,
- table_name: table,
- column_name: column_name,
- job_arguments: job_arguments
- }
- end
-
- let(:migration_attributes) do
- configuration.merge(gitlab_schema: Gitlab::Database.gitlab_schemas_for_connection(model.connection).first)
- end
-
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
- end
-
- subject(:ensure_batched_background_migration_is_finished) { model.ensure_batched_background_migration_is_finished(**configuration) }
-
- it 'raises an error when migration exists and is not marked as finished' do
- expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
-
- create(:batched_background_migration, :active, migration_attributes)
-
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- allow(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(false)
- end
-
- expect { ensure_batched_background_migration_is_finished }
- .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':" \
- "\t#{configuration}" \
- "\n\n" \
- "Finalize it manually by running the following command in a `bash` or `sh` shell:" \
- "\n\n" \
- "\tsudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"id\"]\\,[\"id_convert_to_bigint\"]\\,null]']" \
- "\n\n" \
- "For more information, check the documentation" \
- "\n\n" \
- "\thttps://docs.gitlab.com/ee/user/admin_area/monitoring/background_migrations.html#database-migrations-failing-because-of-batched-background-migration-not-finished"
- end
-
- it 'does not raise error when migration exists and is marked as finished' do
- expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
-
- create(:batched_background_migration, :finished, migration_attributes)
-
- expect { ensure_batched_background_migration_is_finished }
- .not_to raise_error
- end
-
- it 'logs a warning when migration does not exist' do
- expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
-
- create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else))
-
- expect(Gitlab::AppLogger).to receive(:warn)
- .with("Could not find batched background migration for the given configuration: #{configuration}")
-
- expect { ensure_batched_background_migration_is_finished }
- .not_to raise_error
- end
-
- it 'finalizes the migration' do
- expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
-
- migration = create(:batched_background_migration, :active, configuration)
-
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(migration.finish!)
- end
-
- ensure_batched_background_migration_is_finished
- end
-
- context 'when the flag finalize is false' do
- it 'does not finalize the migration' do
- expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
-
- create(:batched_background_migration, :active, configuration)
-
- allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
- expect(runner).not_to receive(:finalize).with(job_class_name, table, column_name, job_arguments)
- end
-
- expect { model.ensure_batched_background_migration_is_finished(**configuration.merge(finalize: false)) }.to raise_error(RuntimeError)
- end
- end
- end
-
describe '#index_exists_by_name?' do
it 'returns true if an index exists' do
ActiveRecord::Migration.connection.execute(
@@ -2621,48 +2751,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#with_lock_retries' do
- let(:buffer) { StringIO.new }
- let(:in_memory_logger) { Gitlab::JsonLogger.new(buffer) }
- let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
-
- it 'sets the migration class name in the logs' do
- model.with_lock_retries(env: env, logger: in_memory_logger) {}
-
- buffer.rewind
- expect(buffer.read).to include("\"class\":\"#{model.class}\"")
- end
-
- where(raise_on_exhaustion: [true, false])
-
- with_them do
- it 'sets raise_on_exhaustion as requested' do
- with_lock_retries = double
- expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
- expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
-
- model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) {}
- end
- end
-
- it 'does not raise on exhaustion by default' do
- with_lock_retries = double
- expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
- expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
-
- model.with_lock_retries(env: env, logger: in_memory_logger) {}
- end
-
- it 'defaults to allowing subtransactions' do
- with_lock_retries = double
-
- expect(Gitlab::Database::WithLockRetries).to receive(:new).with(hash_including(allow_savepoints: true)).and_return(with_lock_retries)
- expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
-
- model.with_lock_retries(env: env, logger: in_memory_logger) {}
- end
- end
-
describe '#backfill_iids' do
include MigrationsHelpers
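For context on the deleted #with_lock_retries examples, the calling convention they exercised looks roughly like this inside a migration (the raise_on_exhaustion keyword is taken from the removed lines; the block body is illustrative):

def up
  with_lock_retries(raise_on_exhaustion: true) do
    add_column :users, :some_flag, :boolean, default: false
  end
end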
@@ -2778,720 +2866,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#check_constraint_name' do
- it 'returns a valid constraint name' do
- name = model.check_constraint_name(:this_is_a_very_long_table_name,
- :with_a_very_long_column_name,
- :with_a_very_long_type)
-
- expect(name).to be_an_instance_of(String)
- expect(name).to start_with('check_')
- expect(name.length).to eq(16)
- end
- end
-
- describe '#check_constraint_exists?' do
- before do
- ActiveRecord::Migration.connection.execute(
- 'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
- )
-
- ActiveRecord::Migration.connection.execute(
- 'CREATE SCHEMA new_test_schema'
- )
-
- ActiveRecord::Migration.connection.execute(
- 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
- )
-
- ActiveRecord::Migration.connection.execute(
- 'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
- )
- end
-
- it 'returns true if a constraint exists' do
- expect(model.check_constraint_exists?(:projects, 'check_1'))
- .to be_truthy
- end
-
- it 'returns false if a constraint does not exist' do
- expect(model.check_constraint_exists?(:projects, 'this_does_not_exist'))
- .to be_falsy
- end
-
- it 'returns false if a constraint with the same name exists in another table' do
- expect(model.check_constraint_exists?(:users, 'check_1'))
- .to be_falsy
- end
-
- it 'returns false if a constraint with the same name exists for the same table in another schema' do
- expect(model.check_constraint_exists?(:projects, 'check_2'))
- .to be_falsy
- end
- end
-
- describe '#add_check_constraint' do
- before do
- allow(model).to receive(:check_constraint_exists?).and_return(false)
- end
-
- context 'constraint name validation' do
- it 'raises an error when too long' do
- expect do
- model.add_check_constraint(
- :test_table,
- 'name IS NOT NULL',
- 'a' * (Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH + 1)
- )
- end.to raise_error(RuntimeError)
- end
-
- it 'does not raise error when the length is acceptable' do
- constraint_name = 'a' * Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
-
- expect(model).to receive(:transaction_open?).and_return(false)
- expect(model).to receive(:check_constraint_exists?).and_return(false)
- expect(model).to receive(:with_lock_retries).and_call_original
- expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
-
- model.add_check_constraint(
- :test_table,
- 'name IS NOT NULL',
- constraint_name,
- validate: false
- )
- end
- end
-
- context 'inside a transaction' do
- it 'raises an error' do
- expect(model).to receive(:transaction_open?).and_return(true)
-
- expect do
- model.add_check_constraint(
- :test_table,
- 'name IS NOT NULL',
- 'check_name_not_null'
- )
- end.to raise_error(RuntimeError)
- end
- end
-
- context 'outside a transaction' do
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
- end
-
- context 'when the constraint is already defined in the database' do
- it 'does not create a constraint' do
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, 'check_name_not_null')
- .and_return(true)
-
- expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
-
- # setting validate: false to only focus on the ADD CONSTRAINT command
- model.add_check_constraint(
- :test_table,
- 'name IS NOT NULL',
- 'check_name_not_null',
- validate: false
- )
- end
- end
-
- context 'when the constraint is not defined in the database' do
- it 'creates the constraint' do
- expect(model).to receive(:with_lock_retries).and_call_original
- expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
-
- # setting validate: false to only focus on the ADD CONSTRAINT command
- model.add_check_constraint(
- :test_table,
- 'char_length(name) <= 255',
- 'check_name_not_null',
- validate: false
- )
- end
- end
-
- context 'when validate is not provided' do
- it 'performs validation' do
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, 'check_name_not_null')
- .and_return(false).exactly(1)
-
- expect(model).to receive(:disable_statement_timeout).and_call_original
- expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/SET statement_timeout TO/)
- expect(model).to receive(:with_lock_retries).and_call_original
- expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
-
- # we need the check constraint to exist so that the validation proceeds
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, 'check_name_not_null')
- .and_return(true).exactly(1)
-
- expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
-
- model.add_check_constraint(
- :test_table,
- 'char_length(name) <= 255',
- 'check_name_not_null'
- )
- end
- end
-
- context 'when validate is provided with a falsey value' do
- it 'skips validation' do
- expect(model).not_to receive(:disable_statement_timeout)
- expect(model).to receive(:with_lock_retries).and_call_original
- expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
- expect(model).not_to receive(:execute).with(/VALIDATE CONSTRAINT/)
-
- model.add_check_constraint(
- :test_table,
- 'char_length(name) <= 255',
- 'check_name_not_null',
- validate: false
- )
- end
- end
-
- context 'when validate is provided with a truthy value' do
- it 'performs validation' do
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, 'check_name_not_null')
- .and_return(false).exactly(1)
-
- expect(model).to receive(:disable_statement_timeout).and_call_original
- expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/SET statement_timeout TO/)
- expect(model).to receive(:with_lock_retries).and_call_original
- expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
-
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, 'check_name_not_null')
- .and_return(true).exactly(1)
-
- expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
- expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
-
- model.add_check_constraint(
- :test_table,
- 'char_length(name) <= 255',
- 'check_name_not_null',
- validate: true
- )
- end
- end
- end
- end
-
- describe '#validate_check_constraint' do
- context 'when the constraint does not exist' do
- it 'raises an error' do
- error_message = /Could not find check constraint "check_1" on table "test_table"/
-
- expect(model).to receive(:check_constraint_exists?).and_return(false)
-
- expect do
- model.validate_check_constraint(:test_table, 'check_1')
- end.to raise_error(RuntimeError, error_message)
- end
- end
-
- context 'when the constraint exists' do
- it 'performs validation' do
- validate_sql = /ALTER TABLE test_table VALIDATE CONSTRAINT check_name/
-
- expect(model).to receive(:check_constraint_exists?).and_return(true)
- expect(model).to receive(:disable_statement_timeout).and_call_original
- expect(model).to receive(:statement_timeout_disabled?).and_return(false)
- expect(model).to receive(:execute).with(/SET statement_timeout TO/)
- expect(model).to receive(:execute).ordered.with(validate_sql)
- expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
-
- model.validate_check_constraint(:test_table, 'check_name')
- end
- end
- end
-
- describe '#remove_check_constraint' do
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
- end
-
- it 'removes the constraint' do
- drop_sql = /ALTER TABLE test_table\s+DROP CONSTRAINT IF EXISTS check_name/
-
- expect(model).to receive(:with_lock_retries).and_call_original
- expect(model).to receive(:execute).with(drop_sql)
-
- model.remove_check_constraint(:test_table, 'check_name')
- end
- end
-
- describe '#copy_check_constraints' do
- context 'inside a transaction' do
- it 'raises an error' do
- expect(model).to receive(:transaction_open?).and_return(true)
-
- expect do
- model.copy_check_constraints(:test_table, :old_column, :new_column)
- end.to raise_error(RuntimeError)
- end
- end
-
- context 'outside a transaction' do
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
- allow(model).to receive(:column_exists?).and_return(true)
- end
-
- let(:old_column_constraints) do
- [
- {
- 'schema_name' => 'public',
- 'table_name' => 'test_table',
- 'column_name' => 'old_column',
- 'constraint_name' => 'check_d7d49d475d',
- 'constraint_def' => 'CHECK ((old_column IS NOT NULL))'
- },
- {
- 'schema_name' => 'public',
- 'table_name' => 'test_table',
- 'column_name' => 'old_column',
- 'constraint_name' => 'check_48560e521e',
- 'constraint_def' => 'CHECK ((char_length(old_column) <= 255))'
- },
- {
- 'schema_name' => 'public',
- 'table_name' => 'test_table',
- 'column_name' => 'old_column',
- 'constraint_name' => 'custom_check_constraint',
- 'constraint_def' => 'CHECK (((old_column IS NOT NULL) AND (another_column IS NULL)))'
- },
- {
- 'schema_name' => 'public',
- 'table_name' => 'test_table',
- 'column_name' => 'old_column',
- 'constraint_name' => 'not_valid_check_constraint',
- 'constraint_def' => 'CHECK ((old_column IS NOT NULL)) NOT VALID'
- }
- ]
- end
-
- it 'copies check constraints from one column to another' do
- allow(model).to receive(:check_constraints_for)
- .with(:test_table, :old_column, schema: nil)
- .and_return(old_column_constraints)
-
- allow(model).to receive(:not_null_constraint_name).with(:test_table, :new_column)
- .and_return('check_1')
-
- allow(model).to receive(:text_limit_name).with(:test_table, :new_column)
- .and_return('check_2')
-
- allow(model).to receive(:check_constraint_name)
- .with(:test_table, :new_column, 'copy_check_constraint')
- .and_return('check_3')
-
- expect(model).to receive(:add_check_constraint)
- .with(
- :test_table,
- '(new_column IS NOT NULL)',
- 'check_1',
- validate: true
- ).once
-
- expect(model).to receive(:add_check_constraint)
- .with(
- :test_table,
- '(char_length(new_column) <= 255)',
- 'check_2',
- validate: true
- ).once
-
- expect(model).to receive(:add_check_constraint)
- .with(
- :test_table,
- '((new_column IS NOT NULL) AND (another_column IS NULL))',
- 'check_3',
- validate: true
- ).once
-
- expect(model).to receive(:add_check_constraint)
- .with(
- :test_table,
- '(new_column IS NOT NULL)',
- 'check_1',
- validate: false
- ).once
-
- model.copy_check_constraints(:test_table, :old_column, :new_column)
- end
-
- it 'does nothing if there are no constraints defined for the old column' do
- allow(model).to receive(:check_constraints_for)
- .with(:test_table, :old_column, schema: nil)
- .and_return([])
-
- expect(model).not_to receive(:add_check_constraint)
-
- model.copy_check_constraints(:test_table, :old_column, :new_column)
- end
-
- it 'raises an error when the orginating column does not exist' do
- allow(model).to receive(:column_exists?).with(:test_table, :old_column).and_return(false)
-
- error_message = /Column old_column does not exist on test_table/
-
- expect do
- model.copy_check_constraints(:test_table, :old_column, :new_column)
- end.to raise_error(RuntimeError, error_message)
- end
-
- it 'raises an error when the target column does not exist' do
- allow(model).to receive(:column_exists?).with(:test_table, :new_column).and_return(false)
-
- error_message = /Column new_column does not exist on test_table/
-
- expect do
- model.copy_check_constraints(:test_table, :old_column, :new_column)
- end.to raise_error(RuntimeError, error_message)
- end
- end
- end
-
- describe '#add_text_limit' do
- context 'when it is called with the default options' do
- it 'calls add_check_constraint with an infered constraint name and validate: true' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'max_length')
- check = "char_length(name) <= 255"
-
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:add_check_constraint)
- .with(:test_table, check, constraint_name, validate: true)
-
- model.add_text_limit(:test_table, :name, 255)
- end
- end
-
- context 'when all parameters are provided' do
- it 'calls add_check_constraint with the correct parameters' do
- constraint_name = 'check_name_limit'
- check = "char_length(name) <= 255"
-
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:add_check_constraint)
- .with(:test_table, check, constraint_name, validate: false)
-
- model.add_text_limit(
- :test_table,
- :name,
- 255,
- constraint_name: constraint_name,
- validate: false
- )
- end
- end
- end
-
- describe '#validate_text_limit' do
- context 'when constraint_name is not provided' do
- it 'calls validate_check_constraint with an infered constraint name' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'max_length')
-
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:validate_check_constraint)
- .with(:test_table, constraint_name)
-
- model.validate_text_limit(:test_table, :name)
- end
- end
-
- context 'when constraint_name is provided' do
- it 'calls validate_check_constraint with the correct parameters' do
- constraint_name = 'check_name_limit'
-
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:validate_check_constraint)
- .with(:test_table, constraint_name)
-
- model.validate_text_limit(:test_table, :name, constraint_name: constraint_name)
- end
- end
- end
-
- describe '#remove_text_limit' do
- context 'when constraint_name is not provided' do
- it 'calls remove_check_constraint with an infered constraint name' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'max_length')
-
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:remove_check_constraint)
- .with(:test_table, constraint_name)
-
- model.remove_text_limit(:test_table, :name)
- end
- end
-
- context 'when constraint_name is provided' do
- it 'calls remove_check_constraint with the correct parameters' do
- constraint_name = 'check_name_limit'
-
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:remove_check_constraint)
- .with(:test_table, constraint_name)
-
- model.remove_text_limit(:test_table, :name, constraint_name: constraint_name)
- end
- end
- end
-
- describe '#check_text_limit_exists?' do
- context 'when constraint_name is not provided' do
- it 'calls check_constraint_exists? with an infered constraint name' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'max_length')
-
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, constraint_name)
-
- model.check_text_limit_exists?(:test_table, :name)
- end
- end
-
- context 'when constraint_name is provided' do
- it 'calls check_constraint_exists? with the correct parameters' do
- constraint_name = 'check_name_limit'
-
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, constraint_name)
-
- model.check_text_limit_exists?(:test_table, :name, constraint_name: constraint_name)
- end
- end
- end
-
- describe '#add_not_null_constraint' do
- context 'when it is called with the default options' do
- it 'calls add_check_constraint with an infered constraint name and validate: true' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'not_null')
- check = "name IS NOT NULL"
-
- expect(model).to receive(:column_is_nullable?).and_return(true)
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:add_check_constraint)
- .with(:test_table, check, constraint_name, validate: true)
-
- model.add_not_null_constraint(:test_table, :name)
- end
- end
-
- context 'when all parameters are provided' do
- it 'calls add_check_constraint with the correct parameters' do
- constraint_name = 'check_name_not_null'
- check = "name IS NOT NULL"
-
- expect(model).to receive(:column_is_nullable?).and_return(true)
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:add_check_constraint)
- .with(:test_table, check, constraint_name, validate: false)
-
- model.add_not_null_constraint(
- :test_table,
- :name,
- constraint_name: constraint_name,
- validate: false
- )
- end
- end
-
- context 'when the column is defined as NOT NULL' do
- it 'does not add a check constraint' do
- expect(model).to receive(:column_is_nullable?).and_return(false)
- expect(model).not_to receive(:check_constraint_name)
- expect(model).not_to receive(:add_check_constraint)
-
- model.add_not_null_constraint(:test_table, :name)
- end
- end
- end
-
- describe '#validate_not_null_constraint' do
- context 'when constraint_name is not provided' do
- it 'calls validate_check_constraint with an infered constraint name' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'not_null')
-
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:validate_check_constraint)
- .with(:test_table, constraint_name)
-
- model.validate_not_null_constraint(:test_table, :name)
- end
- end
-
- context 'when constraint_name is provided' do
- it 'calls validate_check_constraint with the correct parameters' do
- constraint_name = 'check_name_not_null'
-
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:validate_check_constraint)
- .with(:test_table, constraint_name)
-
- model.validate_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
- end
- end
- end
-
- describe '#remove_not_null_constraint' do
- context 'when constraint_name is not provided' do
- it 'calls remove_check_constraint with an infered constraint name' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'not_null')
-
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:remove_check_constraint)
- .with(:test_table, constraint_name)
-
- model.remove_not_null_constraint(:test_table, :name)
- end
- end
-
- context 'when constraint_name is provided' do
- it 'calls remove_check_constraint with the correct parameters' do
- constraint_name = 'check_name_not_null'
-
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:remove_check_constraint)
- .with(:test_table, constraint_name)
-
- model.remove_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
- end
- end
- end
-
- describe '#check_not_null_constraint_exists?' do
- context 'when constraint_name is not provided' do
- it 'calls check_constraint_exists? with an infered constraint name' do
- constraint_name = model.check_constraint_name(:test_table,
- :name,
- 'not_null')
-
- expect(model).to receive(:check_constraint_name).and_call_original
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, constraint_name)
-
- model.check_not_null_constraint_exists?(:test_table, :name)
- end
- end
-
- context 'when constraint_name is provided' do
- it 'calls check_constraint_exists? with the correct parameters' do
- constraint_name = 'check_name_not_null'
-
- expect(model).not_to receive(:check_constraint_name)
- expect(model).to receive(:check_constraint_exists?)
- .with(:test_table, constraint_name)
-
- model.check_not_null_constraint_exists?(:test_table, :name, constraint_name: constraint_name)
- end
- end
- end
-
- describe '#create_extension' do
- subject { model.create_extension(extension) }
-
- let(:extension) { :btree_gist }
-
- it 'executes CREATE EXTENSION statement' do
- expect(model).to receive(:execute).with(/CREATE EXTENSION IF NOT EXISTS #{extension}/)
-
- subject
- end
-
- context 'without proper permissions' do
- before do
- allow(model).to receive(:execute)
- .with(/CREATE EXTENSION IF NOT EXISTS #{extension}/)
- .and_raise(ActiveRecord::StatementInvalid, 'InsufficientPrivilege: permission denied')
- end
-
- it 'raises an exception and prints an error message' do
- expect { subject }
- .to output(/user is not allowed/).to_stderr
- .and raise_error(ActiveRecord::StatementInvalid, /InsufficientPrivilege/)
- end
- end
- end
-
- describe '#drop_extension' do
- subject { model.drop_extension(extension) }
-
- let(:extension) { 'btree_gist' }
-
- it 'executes CREATE EXTENSION statement' do
- expect(model).to receive(:execute).with(/DROP EXTENSION IF EXISTS #{extension}/)
-
- subject
- end
-
- context 'without proper permissions' do
- before do
- allow(model).to receive(:execute)
- .with(/DROP EXTENSION IF EXISTS #{extension}/)
- .and_raise(ActiveRecord::StatementInvalid, 'InsufficientPrivilege: permission denied')
- end
-
- it 'raises an exception and prints an error message' do
- expect { subject }
- .to output(/user is not allowed/).to_stderr
- .and raise_error(ActiveRecord::StatementInvalid, /InsufficientPrivilege/)
- end
- end
- end
-
- describe '#rename_constraint' do
- it "executes the statement to rename constraint" do
- expect(model).to receive(:execute).with /ALTER TABLE "test_table"\nRENAME CONSTRAINT "fk_old_name" TO "fk_new_name"/
-
- model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
- end
- end
-
- describe '#drop_constraint' do
- it "executes the statement to drop the constraint" do
- expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" CASCADE\n")
-
- model.drop_constraint(:test_table, :constraint_name, cascade: true)
- end
-
- context 'when cascade option is false' do
- it "executes the statement to drop the constraint without cascade" do
- expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" \n")
-
- model.drop_constraint(:test_table, :constraint_name, cascade: false)
- end
- end
- end
-
describe '#add_primary_key_using_index' do
it "executes the statement to add the primary key" do
expect(model).to receive(:execute).with /ALTER TABLE "test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/
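The check-constraint helpers whose specs were removed above follow a two-step pattern; the call signatures below are taken from the deleted examples, while the table and constraint names are illustrative:

def up
  # Add the constraint without validating, so existing rows are not scanned
  # under the ACCESS EXCLUSIVE lock.
  add_check_constraint :test_table, 'char_length(name) <= 255',
                       'check_name_length', validate: false

  # Validate later (often in a separate post-deploy migration) without
  # blocking writes.
  validate_check_constraint :test_table, 'check_name_length'
end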
@@ -3558,4 +2932,36 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.add_sequence(:test_table, :test_column, :test_table_id_seq, 1)
end
end
+
+ describe "#partition?" do
+ subject { model.partition?(table_name) }
+
+ let(:table_name) { 'ci_builds_metadata' }
+
+ context "when a partition table exist" do
+ context 'when the view postgres_partitions exists' do
+ it 'calls the view', :aggregate_failures do
+ expect(Gitlab::Database::PostgresPartition).to receive(:partition_exists?).with(table_name).and_call_original
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when the view postgres_partitions does not exist' do
+ before do
+ allow(model).to receive(:view_exists?).and_return(false)
+ end
+
+ it 'does not call the view', :aggregate_failures do
+ expect(Gitlab::Database::PostgresPartition).to receive(:legacy_partition_exists?).with(table_name).and_call_original
+ expect(subject).to be_truthy
+ end
+ end
+ end
+
+ context "when a partition table does not exist" do
+ let(:table_name) { 'partition_does_not_exist' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index f21f1ac5e52..d4fff947c29 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -14,9 +14,6 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
shared_examples_for 'helpers that enqueue background migrations' do |worker_class, connection_class, tracking_database|
before do
allow(model).to receive(:tracking_database).and_return(tracking_database)
-
- # Due to lib/gitlab/database/load_balancing/configuration.rb:92 requiring RequestStore
- # we cannot use stub_feature_flags(force_no_sharing_primary_model: true)
allow(connection_class.connection.load_balancer.configuration)
.to receive(:use_dedicated_connection?).and_return(true)
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index a2f6e6b43ed..3e249b14f2e 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -425,4 +425,99 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
end
end
+
+ describe '#ensure_batched_background_migration_is_finished' do
+ let(:job_class_name) { 'CopyColumnUsingBackgroundMigrationJob' }
+ let(:table) { :events }
+ let(:column_name) { :id }
+ let(:job_arguments) { [["id"], ["id_convert_to_bigint"], nil] }
+
+ let(:configuration) do
+ {
+ job_class_name: job_class_name,
+ table_name: table,
+ column_name: column_name,
+ job_arguments: job_arguments
+ }
+ end
+
+ let(:migration_attributes) do
+ configuration.merge(gitlab_schema: Gitlab::Database.gitlab_schemas_for_connection(migration.connection).first)
+ end
+
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ end
+
+ subject(:ensure_batched_background_migration_is_finished) { migration.ensure_batched_background_migration_is_finished(**configuration) }
+
+ it 'raises an error when migration exists and is not marked as finished' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
+ create(:batched_background_migration, :active, migration_attributes)
+
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ allow(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(false)
+ end
+
+ expect { ensure_batched_background_migration_is_finished }
+ .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':" \
+ "\t#{configuration}" \
+ "\n\n" \
+ "Finalize it manually by running the following command in a `bash` or `sh` shell:" \
+ "\n\n" \
+ "\tsudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"id\"]\\,[\"id_convert_to_bigint\"]\\,null]']" \
+ "\n\n" \
+ "For more information, check the documentation" \
+ "\n\n" \
+ "\thttps://docs.gitlab.com/ee/user/admin_area/monitoring/background_migrations.html#database-migrations-failing-because-of-batched-background-migration-not-finished"
+ end
+
+ it 'does not raise error when migration exists and is marked as finished' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :finished, migration_attributes)
+
+ expect { ensure_batched_background_migration_is_finished }
+ .not_to raise_error
+ end
+
+ it 'logs a warning when migration does not exist' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else))
+
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with("Could not find batched background migration for the given configuration: #{configuration}")
+
+ expect { ensure_batched_background_migration_is_finished }
+ .not_to raise_error
+ end
+
+ it 'finalizes the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
+ migration = create(:batched_background_migration, :active, configuration)
+
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(migration.finish!)
+ end
+
+ ensure_batched_background_migration_is_finished
+ end
+
+ context 'when the finalize flag is false' do
+ it 'does not finalize the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :active, configuration)
+
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).not_to receive(:finalize).with(job_class_name, table, column_name, job_arguments)
+ end
+
+ expect { migration.ensure_batched_background_migration_is_finished(**configuration.merge(finalize: false)) }.to raise_error(RuntimeError)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
new file mode 100644
index 00000000000..6848fc85aa1
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/constraints_helpers_spec.rb
@@ -0,0 +1,679 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::ConstraintsHelpers do
+ let(:model) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ before do
+ allow(model).to receive(:puts)
+ end
+
+ describe '#check_constraint_name' do
+ it 'returns a valid constraint name' do
+ name = model.check_constraint_name(:this_is_a_very_long_table_name,
+ :with_a_very_long_column_name,
+ :with_a_very_long_type)
+
+ expect(name).to be_an_instance_of(String)
+ expect(name).to start_with('check_')
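+ # 'check_' prefix (6 characters) plus what looks like a 10-character hashed identifier derived from the table, column, and type.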
+ expect(name.length).to eq(16)
+ end
+ end
+
+ describe '#check_constraint_exists?' do
+ before do
+ ActiveRecord::Migration.connection.execute(
+ 'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
+ )
+
+ ActiveRecord::Migration.connection.execute(
+ 'CREATE SCHEMA new_test_schema'
+ )
+
+ ActiveRecord::Migration.connection.execute(
+ 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
+ )
+
+ ActiveRecord::Migration.connection.execute(
+ 'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
+ )
+ end
+
+ it 'returns true if a constraint exists' do
+ expect(model)
+ .to be_check_constraint_exists(:projects, 'check_1')
+ end
+
+ it 'returns false if a constraint does not exist' do
+ expect(model)
+ .not_to be_check_constraint_exists(:projects, 'this_does_not_exist')
+ end
+
+ it 'returns false if a constraint with the same name exists in another table' do
+ expect(model)
+ .not_to be_check_constraint_exists(:users, 'check_1')
+ end
+
+ it 'returns false if a constraint with the same name exists for the same table in another schema' do
+ expect(model)
+ .not_to be_check_constraint_exists(:projects, 'check_2')
+ end
+ end
+
+ describe '#add_check_constraint' do
+ before do
+ allow(model).to receive(:check_constraint_exists?).and_return(false)
+ end
+
+ context 'when validating the constraint name' do
+ it 'raises an error when too long' do
+ expect do
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ 'a' * (Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH + 1)
+ )
+ end.to raise_error(RuntimeError)
+ end
+
+ it 'does not raise error when the length is acceptable' do
+ constraint_name = 'a' * Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
+
+ expect(model).to receive(:transaction_open?).and_return(false)
+ expect(model).to receive(:check_constraint_exists?).and_return(false)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ constraint_name,
+ validate: false
+ )
+ end
+ end
+
+ context 'when inside a transaction' do
+ it 'raises an error' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ 'check_name_not_null'
+ )
+ end.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'when outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ context 'when the constraint is already defined in the database' do
+ it 'does not create a constraint' do
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(true)
+
+ expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
+
+ # setting validate: false to only focus on the ADD CONSTRAINT command
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ 'check_name_not_null',
+ validate: false
+ )
+ end
+ end
+
+ context 'when the constraint is not defined in the database' do
+ it 'creates the constraint' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
+
+ # setting validate: false to only focus on the ADD CONSTRAINT command
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null',
+ validate: false
+ )
+ end
+ end
+
+ context 'when validate is not provided' do
+ it 'performs validation' do
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(false).exactly(1)
+
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
+
+ # we need the check constraint to exist so that the validation proceeds
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(true).exactly(1)
+
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null'
+ )
+ end
+ end
+
+ context 'when validate is provided with a falsey value' do
+ it 'skips validation' do
+ expect(model).not_to receive(:disable_statement_timeout)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
+ expect(model).not_to receive(:execute).with(/VALIDATE CONSTRAINT/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null',
+ validate: false
+ )
+ end
+ end
+
+ context 'when validate is provided with a truthy value' do
+ it 'performs validation' do
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(false).exactly(1)
+
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
+
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(true).exactly(1)
+
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null',
+ validate: true
+ )
+ end
+ end
+ end
+ end
+
+ describe '#validate_check_constraint' do
+ context 'when the constraint does not exist' do
+ it 'raises an error' do
+ error_message = /Could not find check constraint "check_1" on table "test_table"/
+
+ expect(model).to receive(:check_constraint_exists?).and_return(false)
+
+ expect do
+ model.validate_check_constraint(:test_table, 'check_1')
+ end.to raise_error(RuntimeError, error_message)
+ end
+ end
+
+ context 'when the constraint exists' do
+ it 'performs validation' do
+ validate_sql = /ALTER TABLE test_table VALIDATE CONSTRAINT check_name/
+
+ expect(model).to receive(:check_constraint_exists?).and_return(true)
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:execute).ordered.with(validate_sql)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ model.validate_check_constraint(:test_table, 'check_name')
+ end
+ end
+ end
+
+ describe '#remove_check_constraint' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'removes the constraint' do
+ drop_sql = /ALTER TABLE test_table\s+DROP CONSTRAINT IF EXISTS check_name/
+
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(drop_sql)
+
+ model.remove_check_constraint(:test_table, 'check_name')
+ end
+ end
+
+ describe '#copy_check_constraints' do
+ context 'when inside a transaction' do
+ it 'raises an error' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'when outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ allow(model).to receive(:column_exists?).and_return(true)
+ end
+
+ let(:old_column_constraints) do
+ [
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'check_d7d49d475d',
+ 'constraint_def' => 'CHECK ((old_column IS NOT NULL))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'check_48560e521e',
+ 'constraint_def' => 'CHECK ((char_length(old_column) <= 255))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'custom_check_constraint',
+ 'constraint_def' => 'CHECK (((old_column IS NOT NULL) AND (another_column IS NULL)))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'not_valid_check_constraint',
+ 'constraint_def' => 'CHECK ((old_column IS NOT NULL)) NOT VALID'
+ }
+ ]
+ end
+
+ it 'copies check constraints from one column to another' do
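+ # The three valid constraints are expected to be re-added with validate: true; the NOT VALID one with validate: false.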
+ allow(model).to receive(:check_constraints_for)
+ .with(:test_table, :old_column, schema: nil)
+ .and_return(old_column_constraints)
+
+ allow(model).to receive(:not_null_constraint_name).with(:test_table, :new_column)
+ .and_return('check_1')
+
+ allow(model).to receive(:text_limit_name).with(:test_table, :new_column)
+ .and_return('check_2')
+
+ allow(model).to receive(:check_constraint_name)
+ .with(:test_table, :new_column, 'copy_check_constraint')
+ .and_return('check_3')
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(new_column IS NOT NULL)',
+ 'check_1',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(char_length(new_column) <= 255)',
+ 'check_2',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '((new_column IS NOT NULL) AND (another_column IS NULL))',
+ 'check_3',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(new_column IS NOT NULL)',
+ 'check_1',
+ validate: false
+ ).once
+
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end
+
+ it 'does nothing if there are no constraints defined for the old column' do
+ allow(model).to receive(:check_constraints_for)
+ .with(:test_table, :old_column, schema: nil)
+ .and_return([])
+
+ expect(model).not_to receive(:add_check_constraint)
+
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end
+
+ it 'raises an error when the originating column does not exist' do
+ allow(model).to receive(:column_exists?).with(:test_table, :old_column).and_return(false)
+
+ error_message = /Column old_column does not exist on test_table/
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError, error_message)
+ end
+
+ it 'raises an error when the target column does not exist' do
+ allow(model).to receive(:column_exists?).with(:test_table, :new_column).and_return(false)
+
+ error_message = /Column new_column does not exist on test_table/
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError, error_message)
+ end
+ end
+ end
+
+ describe '#add_text_limit' do
+ context 'when it is called with the default options' do
+ it 'calls add_check_constraint with an inferred constraint name and validate: true' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+ check = "char_length(name) <= 255"
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:add_check_constraint)
+ .with(:test_table, check, constraint_name, validate: true)
+
+ model.add_text_limit(:test_table, :name, 255)
+ end
+ end
+
+ context 'when all parameters are provided' do
+ it 'calls add_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_limit'
+ check = "char_length(name) <= 255"
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:add_check_constraint)
+ .with(:test_table, check, constraint_name, validate: false)
+
+ model.add_text_limit(
+ :test_table,
+ :name,
+ 255,
+ constraint_name: constraint_name,
+ validate: false
+ )
+ end
+ end
+ end
+
+ describe '#validate_text_limit' do
+ context 'when constraint_name is not provided' do
+ it 'calls validate_check_constraint with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:validate_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.validate_text_limit(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls validate_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_limit'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:validate_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.validate_text_limit(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#remove_text_limit' do
+ context 'when constraint_name is not provided' do
+ it 'calls remove_check_constraint with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:remove_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.remove_text_limit(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls remove_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_limit'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:remove_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.remove_text_limit(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#check_text_limit_exists?' do
+ context 'when constraint_name is not provided' do
+ it 'calls check_constraint_exists? with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, constraint_name)
+
+ model.check_text_limit_exists?(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls check_constraint_exists? with the correct parameters' do
+ constraint_name = 'check_name_limit'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, constraint_name)
+
+ model.check_text_limit_exists?(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#add_not_null_constraint' do
+ context 'when it is called with the default options' do
+ it 'calls add_check_constraint with an inferred constraint name and validate: true' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'not_null')
+ check = "name IS NOT NULL"
+
+ expect(model).to receive(:column_is_nullable?).and_return(true)
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:add_check_constraint)
+ .with(:test_table, check, constraint_name, validate: true)
+
+ model.add_not_null_constraint(:test_table, :name)
+ end
+ end
+
+ context 'when all parameters are provided' do
+ it 'calls add_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_not_null'
+ check = "name IS NOT NULL"
+
+ expect(model).to receive(:column_is_nullable?).and_return(true)
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:add_check_constraint)
+ .with(:test_table, check, constraint_name, validate: false)
+
+ model.add_not_null_constraint(
+ :test_table,
+ :name,
+ constraint_name: constraint_name,
+ validate: false
+ )
+ end
+ end
+
+ context 'when the column is defined as NOT NULL' do
+ it 'does not add a check constraint' do
+ expect(model).to receive(:column_is_nullable?).and_return(false)
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).not_to receive(:add_check_constraint)
+
+ model.add_not_null_constraint(:test_table, :name)
+ end
+ end
+ end
+
+ describe '#validate_not_null_constraint' do
+ context 'when constraint_name is not provided' do
+ it 'calls validate_check_constraint with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'not_null')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:validate_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.validate_not_null_constraint(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls validate_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_not_null'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:validate_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.validate_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#remove_not_null_constraint' do
+ context 'when constraint_name is not provided' do
+ it 'calls remove_check_constraint with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'not_null')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:remove_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.remove_not_null_constraint(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls remove_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_not_null'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:remove_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.remove_not_null_constraint(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#check_not_null_constraint_exists?' do
+ context 'when constraint_name is not provided' do
+ it 'calls check_constraint_exists? with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'not_null')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, constraint_name)
+
+ model.check_not_null_constraint_exists?(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls check_constraint_exists? with the correct parameters' do
+ constraint_name = 'check_name_not_null'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, constraint_name)
+
+ model.check_not_null_constraint_exists?(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#rename_constraint' do
+ it "executes the statement to rename constraint" do
+ expect(model).to receive(:execute).with(
+ /ALTER TABLE "test_table"\nRENAME CONSTRAINT "fk_old_name" TO "fk_new_name"/
+ )
+
+ model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
+ end
+ end
+
+ describe '#drop_constraint' do
+ it "executes the statement to drop the constraint" do
+ expect(model).to receive(:execute).with(
+ "ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" CASCADE\n"
+ )
+
+ model.drop_constraint(:test_table, :constraint_name, cascade: true)
+ end
+
+ context 'when cascade option is false' do
+ it "executes the statement to drop the constraint without cascade" do
+ expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" \n")
+
+ model.drop_constraint(:test_table, :constraint_name, cascade: false)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/extension_helpers_spec.rb b/spec/lib/gitlab/database/migrations/extension_helpers_spec.rb
new file mode 100644
index 00000000000..fb29e06bc01
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/extension_helpers_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::ExtensionHelpers do
+ let(:model) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ before do
+ allow(model).to receive(:puts)
+ end
+
+ describe '#create_extension' do
+ subject { model.create_extension(extension) }
+
+ let(:extension) { :btree_gist }
+
+ it 'executes CREATE EXTENSION statement' do
+ expect(model).to receive(:execute).with(/CREATE EXTENSION IF NOT EXISTS #{extension}/)
+
+ subject
+ end
+
+ context 'without proper permissions' do
+ before do
+ allow(model).to receive(:execute)
+ .with(/CREATE EXTENSION IF NOT EXISTS #{extension}/)
+ .and_raise(ActiveRecord::StatementInvalid, 'InsufficientPrivilege: permission denied')
+ end
+
+ it 'raises an exception and prints an error message' do
+ expect { subject }
+ .to output(/user is not allowed/).to_stderr
+ .and raise_error(ActiveRecord::StatementInvalid, /InsufficientPrivilege/)
+ end
+ end
+ end
+
+ describe '#drop_extension' do
+ subject { model.drop_extension(extension) }
+
+ let(:extension) { 'btree_gist' }
+
+ it 'executes DROP EXTENSION statement' do
+ expect(model).to receive(:execute).with(/DROP EXTENSION IF EXISTS #{extension}/)
+
+ subject
+ end
+
+ context 'without proper permissions' do
+ before do
+ allow(model).to receive(:execute)
+ .with(/DROP EXTENSION IF EXISTS #{extension}/)
+ .and_raise(ActiveRecord::StatementInvalid, 'InsufficientPrivilege: permission denied')
+ end
+
+ it 'raises an exception and prints an error message' do
+ expect { subject }
+ .to output(/user is not allowed/).to_stderr
+ .and raise_error(ActiveRecord::StatementInvalid, /InsufficientPrivilege/)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/lock_retries_helpers_spec.rb b/spec/lib/gitlab/database/migrations/lock_retries_helpers_spec.rb
new file mode 100644
index 00000000000..a8739f6758f
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/lock_retries_helpers_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::LockRetriesHelpers do
+ let(:model) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ describe '#with_lock_retries' do
+ let(:buffer) { StringIO.new }
+ let(:in_memory_logger) { Gitlab::JsonLogger.new(buffer) }
+ let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
+
+ it 'sets the migration class name in the logs' do
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
+
+ buffer.rewind
+ expect(buffer.read).to include("\"class\":\"#{model.class}\"")
+ end
+
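+ # Parameterized via rspec-parameterized: the example below runs once per raise_on_exhaustion value.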
+ where(raise_on_exhaustion: [true, false])
+
+ with_them do
+ it 'sets raise_on_exhaustion as requested' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) {}
+ end
+ end
+
+ it 'does not raise on exhaustion by default' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
+ end
+
+ it 'defaults to allowing subtransactions' do
+ with_lock_retries = double
+
+ expect(Gitlab::Database::WithLockRetries)
+ .to receive(:new).with(hash_including(allow_savepoints: true)).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index f364ebfa522..bd382547689 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -2,26 +2,65 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Runner, :reestablished_active_record_base do
- include Database::MultipleDatabases
-
let(:base_result_dir) { Pathname.new(Dir.mktmpdir) }
let(:migration_runs) { [] } # This list gets populated as the runner tries to run migrations
# Tests depend on all of these lists being sorted in the order migrations would be applied
- let(:applied_migrations_other_branches) { [double(ActiveRecord::Migration, version: 1, name: 'migration_complete_other_branch')] }
+ let(:applied_migrations_other_branches) do
+ [
+ double(
+ ActiveRecord::Migration,
+ version: 1,
+ name: 'migration_complete_other_branch',
+ filename: 'db/migrate/1_migration_complete_other_branch.rb'
+ )
+ ]
+ end
let(:applied_migrations_this_branch) do
[
- double(ActiveRecord::Migration, version: 2, name: 'older_migration_complete_this_branch'),
- double(ActiveRecord::Migration, version: 3, name: 'newer_migration_complete_this_branch')
+ double(
+ ActiveRecord::Migration,
+ version: 2,
+ name: 'older_migration_complete_this_branch',
+ filename: 'db/migrate/2_older_migration_complete_this_branch.rb'
+ ),
+ double(
+ ActiveRecord::Migration,
+ version: 3,
+ name: 'post_migration_complete_this_branch',
+ filename: 'db/post_migrate/3_post_migration_complete_this_branch.rb'
+ ),
+ double(
+ ActiveRecord::Migration,
+ version: 4,
+ name: 'newer_migration_complete_this_branch',
+ filename: 'db/migrate/4_newer_migration_complete_this_branch.rb'
+ )
].sort_by(&:version)
end
let(:pending_migrations) do
[
- double(ActiveRecord::Migration, version: 4, name: 'older_migration_pending'),
- double(ActiveRecord::Migration, version: 5, name: 'newer_migration_pending')
+ double(
+ ActiveRecord::Migration,
+ version: 5,
+ name: 'older_migration_pending',
+ filename: 'db/migrate/5_older_migration_pending.rb'
+ ),
+ double(
+ ActiveRecord::Migration,
+ version: 6,
+ name: 'post_migration_pending',
+ filename: 'db/post_migrate/6_post_migration_pending.rb'
+ ),
+ double(
+ ActiveRecord::Migration,
+ version: 7,
+ name: 'newer_migration_pending',
+ filename: 'db/migrate/7_newer_migration_pending.rb'
+ )
].sort_by(&:version)
end
@@ -87,11 +126,11 @@ RSpec.describe Gitlab::Database::Migrations::Runner, :reestablished_active_recor
context 'running migrations' do
subject(:up) { described_class.up(database: database, legacy_mode: legacy_mode) }
- it 'runs the unapplied migrations in version order', :aggregate_failures do
+ it 'runs the unapplied migrations in regular/post order, then version order', :aggregate_failures do
up.run
- expect(migration_runs.map(&:dir)).to match_array([:up, :up])
- expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version))
+ expect(migration_runs.map(&:dir)).to match_array([:up, :up, :up])
+ expect(migration_runs.map(&:version_to_migrate)).to eq([5, 7, 6])
end
it 'writes a metadata file with the current schema version and database name' do
@@ -130,8 +169,8 @@ RSpec.describe Gitlab::Database::Migrations::Runner, :reestablished_active_recor
it 'runs the applied migrations for the current branch in reverse order', :aggregate_failures do
down.run
- expect(migration_runs.map(&:dir)).to match_array([:down, :down])
- expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version))
+ expect(migration_runs.map(&:dir)).to match_array([:down, :down, :down])
+ expect(migration_runs.map(&:version_to_migrate)).to eq([3, 4, 2])
end
end
diff --git a/spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb b/spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb
new file mode 100644
index 00000000000..d35211af680
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::TimeoutHelpers do
+ let(:model) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ describe '#disable_statement_timeout' do
+ it 'disables the statement timeout for the current transaction only' do
+ expect(model).to receive(:execute).with('SET LOCAL statement_timeout TO 0')
+
+ model.disable_statement_timeout
+ end
+
+ # This spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
+ context 'with real environment', :delete do
+ before do
+ model.execute("SET statement_timeout TO '20000'")
+ end
+
+ after do
+ model.execute('RESET statement_timeout')
+ end
+
+ it 'sets statement_timeout to 0 only for the current transaction' do
+ expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+
+ model.connection.transaction do
+ model.disable_statement_timeout
+ expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+ end
+
+ expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+ end
+
+ context 'when passing a block' do
+ it 'disables the statement timeout at the session level and executes the block' do
+ expect(model).to receive(:execute).with('SET statement_timeout TO 0')
+ expect(model).to receive(:execute).with('RESET statement_timeout').at_least(:once)
+
+ expect { |block| model.disable_statement_timeout(&block) }.to yield_control
+ end
+
+ # This spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
+ context 'with real environment', :delete do
+ before do
+ model.execute("SET statement_timeout TO '20000'")
+ end
+
+ after do
+ model.execute('RESET statement_timeout')
+ end
+
+ it 'sets statement_timeout to 0 for any code run inside the block' do
+ expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+
+ model.disable_statement_timeout do
+ model.connection.transaction do
+ expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+ end
+
+ expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+ end
+ end
+ end
+ end
+ end
+
+ # This spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
+ context 'when the statement_timeout is already disabled', :delete do
+ before do
+ ActiveRecord::Migration.connection.execute('SET statement_timeout TO 0')
+ end
+
+ after do
+ # Use ActiveRecord::Migration.connection instead of model.execute
+ # so that this call is not counted below
+ ActiveRecord::Migration.connection.execute('RESET statement_timeout')
+ end
+
+ it 'yields control without disabling or resetting the statement timeout' do
+ expect(model).not_to receive(:execute).with('SET statement_timeout TO 0')
+ expect(model).not_to receive(:execute).with('RESET statement_timeout')
+
+ expect { |block| model.disable_statement_timeout(&block) }.to yield_control
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb b/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb
index 0e804b4feac..cd3a94f5737 100644
--- a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
let(:referenced_table_name) { '_test_referenced_table' }
let(:other_referenced_table_name) { '_test_other_referenced_table' }
let(:parent_table_name) { "#{table_name}_parent" }
+ let(:lock_tables) { [] }
let(:model) { define_batchable_model(table_name, connection: connection) }
@@ -27,7 +28,8 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
table_name: table_name,
partitioning_column: partitioning_column,
parent_table_name: parent_table_name,
- zero_partition_value: partitioning_default
+ zero_partition_value: partitioning_default,
+ lock_tables: lock_tables
)
end
@@ -168,6 +170,16 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
end
end
+ context 'with locking tables' do
+ let(:lock_tables) { [table_name] }
+
+ it 'locks the table' do
+ recorder = ActiveRecord::QueryRecorder.new { partition }
+
+ expect(recorder.log).to include(/LOCK "_test_table_to_partition" IN ACCESS EXCLUSIVE MODE/)
+ end
+ end
+
context 'when an error occurs during the conversion' do
def fail_first_time
# We can't directly use a boolean here, as we need something that will be passed by-reference to the proc
diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
index 2ef873e8adb..336dec3a8a0 100644
--- a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb
@@ -92,11 +92,11 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
context 'removing foreign keys' do
it 'removes foreign keys from the table before dropping it' do
- expect(dropper).to receive(:drop_detached_partition).and_wrap_original do |drop_method, partition_name|
- expect(partition_name).to eq('test_partition')
- expect(foreign_key_exists_by_name(partition_name, 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_falsey
+ expect(dropper).to receive(:drop_detached_partition).and_wrap_original do |drop_method, partition|
+ expect(partition.table_name).to eq('test_partition')
+ expect(foreign_key_exists_by_name(partition.table_name, 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_falsey
- drop_method.call(partition_name)
+ drop_method.call(partition)
end
expect(foreign_key_exists_by_name('test_partition', 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_truthy
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
index 7465f69b87c..a81c8a5a49c 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
@@ -65,8 +65,11 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
end
def expect_add_concurrent_index_and_call_original(table, column, index)
- expect(migration).to receive(:add_concurrent_index).ordered.with(table, column, { name: index })
- .and_wrap_original { |_, table, column, options| connection.add_index(table, column, **options) }
+ expect(migration).to receive(:add_concurrent_index).ordered.with(table, column, { name: index, allow_partition: true })
+ .and_wrap_original do |_, table, column, options|
+ options.delete(:allow_partition)
+ connection.add_index(table, column, **options)
+ end
end
end
@@ -91,7 +94,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
it 'forwards them to the index helper methods', :aggregate_failures do
expect(migration).to receive(:add_concurrent_index)
- .with(partition1_identifier, column_name, { name: partition1_index, where: 'x > 0', unique: true })
+ .with(partition1_identifier, column_name, { name: partition1_index, where: 'x > 0', unique: true, allow_partition: true })
expect(migration).to receive(:add_index)
.with(table_name, column_name, { name: index_name, where: 'x > 0', unique: true })
@@ -231,4 +234,165 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
end
end
end
+
+ describe '#indexes_by_definition_for_table' do
+ context 'when a partitioned table has indexes' do
+ subject do
+ migration.indexes_by_definition_for_table(table_name)
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ it 'captures partitioned index names by index definition' do
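+ # The definition key normalizes away the index name (shown as "_"), so equivalent indexes share the same key.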
+ expect(subject).to match(a_hash_including({ "CREATE _ btree (#{column_name})" => index_name }))
+ end
+ end
+
+ context 'when a non-partitioned table has indexes' do
+ let(:regular_table_name) { '_test_regular_table' }
+ let(:regular_index_name) { '_test_regular_index_name' }
+
+ subject do
+ migration.indexes_by_definition_for_table(regular_table_name)
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{regular_table_name} (
+ #{column_name} timestamptz NOT NULL
+ );
+
+ CREATE INDEX #{regular_index_name} ON #{regular_table_name} (#{column_name});
+ SQL
+ end
+
+ it 'captures index names by index definition' do
+ expect(subject).to match(a_hash_including({ "CREATE _ btree (#{column_name})" => regular_index_name }))
+ end
+ end
+
+ context 'when a non-partitioned table has duplicate indexes' do
+ let(:regular_table_name) { '_test_regular_table' }
+ let(:regular_index_name) { '_test_regular_index_name' }
+ let(:duplicate_index_name) { '_test_duplicate_index_name' }
+
+ subject do
+ migration.indexes_by_definition_for_table(regular_table_name)
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{regular_table_name} (
+ #{column_name} timestamptz NOT NULL
+ );
+
+ CREATE INDEX #{regular_index_name} ON #{regular_table_name} (#{column_name});
+ CREATE INDEX #{duplicate_index_name} ON #{regular_table_name} (#{column_name});
+ SQL
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::DuplicatedIndexesError)
+ end
+ end
+ end
+
+ describe '#rename_indexes_for_table' do
+ let(:original_table_name) { '_test_rename_indexes_table' }
+ let(:first_partition_name) { '_test_rename_indexes_table_1' }
+ let(:transient_table_name) { '_test_rename_indexes_table_child' }
+ let(:custom_column_name) { 'created_at' }
+ let(:generated_column_name) { 'updated_at' }
+ let(:custom_index_name) { 'index_test_rename_indexes_table_on_created_at' }
+ let(:custom_index_name_regenerated) { '_test_rename_indexes_table_created_at_idx' }
+ let(:generated_index_name) { '_test_rename_indexes_table_updated_at_idx' }
+ let(:generated_index_name_collided) { '_test_rename_indexes_table_updated_at_idx1' }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{original_table_name} (
+ #{custom_column_name} timestamptz NOT NULL,
+ #{generated_column_name} timestamptz NOT NULL
+ );
+
+ CREATE INDEX #{custom_index_name} ON #{original_table_name} (#{custom_column_name});
+ CREATE INDEX ON #{original_table_name} (#{generated_column_name});
+ SQL
+ end
+
+ context 'when changing a table within the current schema' do
+ let!(:identifiers) { migration.indexes_by_definition_for_table(original_table_name) }
+
+ before do
+ connection.execute(<<~SQL)
+ ALTER TABLE #{original_table_name} RENAME TO #{first_partition_name};
+ CREATE TABLE #{original_table_name} (LIKE #{first_partition_name} INCLUDING ALL);
+ DROP TABLE #{first_partition_name};
+ SQL
+ end
+
+ it 'maps index names after they are changed' do
+ migration.rename_indexes_for_table(original_table_name, identifiers)
+
+ expect_index_to_exist(custom_index_name)
+ expect_index_to_exist(generated_index_name)
+ end
+
+ it 'does not rename an index which does not exist in the to_hash' do
+ partial_identifiers = identifiers.reject { |_, name| name == custom_index_name }
+
+ migration.rename_indexes_for_table(original_table_name, partial_identifiers)
+
+ expect_index_not_to_exist(custom_index_name)
+ expect_index_to_exist(generated_index_name)
+ end
+ end
+
+ context 'when partitioning an existing table' do
+ before do
+ connection.execute(<<~SQL)
+ /* Create new parent table */
+ CREATE TABLE #{first_partition_name} (LIKE #{original_table_name} INCLUDING ALL);
+ SQL
+ end
+
+ it 'renames indexes across schemas' do
+ # Capture index names generated by postgres
+ generated_index_names = migration.indexes_by_definition_for_table(first_partition_name)
+
+ # Capture index names from original table
+ original_index_names = migration.indexes_by_definition_for_table(original_table_name)
+
+ connection.execute(<<~SQL)
+ /* Rename original table out of the way */
+ ALTER TABLE #{original_table_name} RENAME TO #{transient_table_name};
+
+ /* Rename new parent table to original name */
+ ALTER TABLE #{first_partition_name} RENAME TO #{original_table_name};
+
+ /* Move original table to gitlab_partitions_dynamic schema */
+ ALTER TABLE #{transient_table_name} SET SCHEMA #{partition_schema};
+
+ /* Rename original table to be the first partition */
+ ALTER TABLE #{partition_schema}.#{transient_table_name} RENAME TO #{first_partition_name};
+ SQL
+
+ # Apply index names generated by postgres to first partition
+ migration.rename_indexes_for_table(first_partition_name, generated_index_names, schema_name: partition_schema)
+
+ expect_index_to_exist('_test_rename_indexes_table_1_created_at_idx')
+ expect_index_to_exist('_test_rename_indexes_table_1_updated_at_idx')
+
+ # Apply index names from original table to new parent table
+ migration.rename_indexes_for_table(original_table_name, original_index_names)
+
+ expect_index_to_exist(custom_index_name)
+ expect_index_to_exist(generated_index_name)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 8bb9ad2737a..e76b1da3834 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -43,6 +43,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'list partitioning conversion helpers' do
shared_examples_for 'delegates to ConvertTableToFirstListPartition' do
+ let(:extra_options) { {} }
it 'throws an error if in a transaction' do
allow(migration).to receive(:transaction_open?).and_return(true)
expect { migrate }.to raise_error(/cannot be run inside a transaction/)
@@ -54,7 +55,8 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
table_name: source_table,
parent_table_name: partitioned_table,
partitioning_column: partition_column,
- zero_partition_value: min_date) do |converter|
+ zero_partition_value: min_date,
+ **extra_options) do |converter|
expect(converter).to receive(expected_method)
end
@@ -64,12 +66,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
describe '#convert_table_to_first_list_partition' do
it_behaves_like 'delegates to ConvertTableToFirstListPartition' do
+ let(:lock_tables) { [source_table] }
+ let(:extra_options) { { lock_tables: lock_tables } }
let(:expected_method) { :partition }
let(:migrate) do
migration.convert_table_to_first_list_partition(table_name: source_table,
partitioning_column: partition_column,
parent_table_name: partitioned_table,
- initial_partitioning_value: min_date)
+ initial_partitioning_value: min_date,
+ lock_tables: lock_tables)
end
end
end
diff --git a/spec/lib/gitlab/database/postgres_partition_spec.rb b/spec/lib/gitlab/database/postgres_partition_spec.rb
index 5a44090d5ae..14a4d405621 100644
--- a/spec/lib/gitlab/database/postgres_partition_spec.rb
+++ b/spec/lib/gitlab/database/postgres_partition_spec.rb
@@ -72,4 +72,36 @@ RSpec.describe Gitlab::Database::PostgresPartition, type: :model do
expect(find(identifier).condition).to eq("FOR VALUES FROM ('2020-01-01 00:00:00+00') TO ('2020-02-01 00:00:00+00')")
end
end
+
+ describe '.partition_exists?' do
+ subject { described_class.partition_exists?(table_name) }
+
+ context 'when the partition exists' do
+ let(:table_name) { "ci_builds_metadata" }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the partition does not exist' do
+ let(:table_name) { 'partition_does_not_exist' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '.legacy_partition_exists?' do
+ subject { described_class.legacy_partition_exists?(table_name) }
+
+ context 'when the partition exists' do
+ let(:table_name) { "ci_builds_metadata" }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the partition does not exist' do
+ let(:table_name) { 'partition_does_not_exist' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb
index 0b849063562..6dc9ffc4aba 100644
--- a/spec/lib/gitlab/database/query_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzer_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
before do
allow(described_class.instance).to receive(:all_analyzers).and_return([analyzer, disabled_analyzer])
allow(analyzer).to receive(:enabled?).and_return(true)
+ allow(analyzer).to receive(:raw?).and_return(false)
allow(analyzer).to receive(:suppressed?).and_return(false)
allow(analyzer).to receive(:begin!)
allow(analyzer).to receive(:end!)
@@ -181,6 +182,13 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error
end
+ it 'does call analyze with raw sql when raw? is true' do
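+ # A raw? analyzer receives the SQL string itself, as the expectation below asserts.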
+ expect(analyzer).to receive(:raw?).and_return(true)
+ expect(analyzer).to receive(:analyze).with('SELECT 1 FROM projects')
+
+ expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error
+ end
+
def process_sql(sql)
described_class.instance.within do
ApplicationRecord.load_balancer.read_write do |connection|
diff --git a/spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb
new file mode 100644
index 00000000000..0fe19041b6d
--- /dev/null
+++ b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::QueryAnalyzers::Ci::PartitioningIdAnalyzer, query_analyzers: false do
+ let(:analyzer) { described_class }
+
+ before do
+ allow(Gitlab::Database::QueryAnalyzer.instance).to receive(:all_analyzers).and_return([analyzer])
+ end
+
+ context 'when ci_partitioning_analyze_queries_partition_id_check is disabled' do
+ before do
+ stub_feature_flags(ci_partitioning_analyze_queries_partition_id_check: false)
+ end
+
+ it 'does not analyze the query' do
+ expect(analyzer).not_to receive(:analyze)
+
+ process_sql(Ci::BuildMetadata, "SELECT 1 FROM ci_builds_metadata")
+ end
+ end
+
+ context 'when ci_partitioning_analyze_queries_partition_id_check is enabled' do
+ context 'when querying a routing table' do
+ shared_examples 'a good query' do |sql|
+ it 'does not raise error' do
+ expect { process_sql(Ci::BuildMetadata, sql) }.not_to raise_error
+ end
+ end
+
+ shared_examples 'a bad query' do |sql|
+ it 'raises PartitionIdMissingError' do
+ expect { process_sql(Ci::BuildMetadata, sql) }.to raise_error(described_class::PartitionIdMissingError)
+ end
+ end
+
+ context 'when partition_id is present' do
+ context 'when selecting data' do
+ it_behaves_like 'a good query', 'SELECT * FROM p_ci_builds_metadata WHERE partition_id = 100'
+ end
+
+ context 'with a join query' do
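+ # Both the outer query and the subquery filter on partition_id, so this should pass the analyzer.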
+ sql = <<~SQL
+ SELECT ci_builds.id
+ FROM p_ci_builds
+ JOIN p_ci_builds_metadata ON p_ci_builds_metadata.build_id = ci_builds.id
+ WHERE ci_builds.type = 'Ci::Build'
+ AND ci_builds.partition_id = 100
+ AND (NOT p_ci_builds_metadata.id IN
+ (SELECT p_ci_builds_metadata.id
+ FROM p_ci_builds_metadata
+ WHERE p_ci_builds_metadata.build_id = ci_builds.id
+ AND p_ci_builds_metadata.interruptible = TRUE
+ AND p_ci_builds_metadata.partition_id = 100 ));
+ SQL
+
+ it_behaves_like 'a good query', sql
+ end
+
+ context 'when removing data' do
+ it_behaves_like 'a good query', 'DELETE FROM p_ci_builds_metadata WHERE partition_id = 100'
+ end
+
+ context 'when updating data' do
+ sql = 'UPDATE p_ci_builds_metadata SET interruptible = false WHERE partition_id = 100'
+
+ it_behaves_like 'a good query', sql
+ end
+
+ context 'when inserting a record' do
+ it_behaves_like 'a good query', 'INSERT INTO p_ci_builds_metadata (id, partition_id) VALUES(1, 1)'
+ end
+ end
+
+ context 'when partition_id is missing' do
+ context 'when inserting a record' do
+ it_behaves_like 'a bad query', 'INSERT INTO p_ci_builds_metadata (id) VALUES(1)'
+ end
+
+ context 'when selecting data' do
+ it_behaves_like 'a bad query', 'SELECT * FROM p_ci_builds_metadata WHERE id = 1'
+ end
+
+ context 'when removing data' do
+ it_behaves_like 'a bad query', 'DELETE FROM p_ci_builds_metadata WHERE id = 1'
+ end
+
+ context 'when updating data' do
+ it_behaves_like 'a bad query', 'UPDATE p_ci_builds_metadata SET interruptible = false WHERE id = 1'
+ end
+
+ context 'with a join query' do
+ sql = <<~SQL
+ SELECT ci_builds.id
+ FROM ci_builds
+ JOIN p_ci_builds_metadata ON p_ci_builds_metadata.build_id = ci_builds.id
+ WHERE ci_builds.type = 'Ci::Build'
+ AND ci_builds.partition_id = 100
+ AND (NOT p_ci_builds_metadata.id IN
+ (SELECT p_ci_builds_metadata.id
+ FROM p_ci_builds_metadata
+ WHERE p_ci_builds_metadata.build_id = ci_builds.id
+ AND p_ci_builds_metadata.interruptible = TRUE ));
+ SQL
+
+ it_behaves_like 'a bad query', sql
+ end
+ end
+ end
+ end
+
+ private
+
+ def process_sql(model, sql)
+ Gitlab::Database::QueryAnalyzer.instance.within do
+ # Skip load balancer and retrieve connection assigned to model
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/query_analyzers/ci/partitioning_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb
index ef7c7965c09..1f86c2ccbb0 100644
--- a/spec/lib/gitlab/database/query_analyzers/ci/partitioning_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::QueryAnalyzers::Ci::PartitioningAnalyzer, query_analyzers: false do
+RSpec.describe Gitlab::Database::QueryAnalyzers::Ci::PartitioningRoutingAnalyzer, query_analyzers: false do
let(:analyzer) { described_class }
before do
@@ -54,15 +54,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::Ci::PartitioningAnalyzer, query
context 'when analyzing non targeted table' do
it 'does not raise error' do
- expect { process_sql(Ci::BuildMetadata, "SELECT 1 FROM projects") }
- .not_to raise_error
- end
- end
-
- context 'when querying a routing table' do
- it 'does not raise error' do
- expect { process_sql(Ci::BuildMetadata, "SELECT 1 FROM p_ci_builds_metadata") }
- .not_to raise_error
+ expect { process_sql(Ci::BuildMetadata, "SELECT 1 FROM projects") }.not_to raise_error
end
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb b/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
new file mode 100644
index 00000000000..ec01ae623ae
--- /dev/null
+++ b/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::QueryAnalyzers::QueryRecorder, query_analyzers: false do
+ # We keep only the QueryRecorder analyzer running
+ around do |example|
+ described_class.with_suppressed(false) do
+ example.run
+ end
+ end
+
+ context 'when analyzer is enabled for tests' do
+ let(:query) { 'SELECT 1 FROM projects' }
+ let(:log_path) { Rails.root.join(described_class::LOG_FILE) }
+
+ before do
+ stub_env('CI', 'true')
+
+ # The analyzer is started here so that the stubbed CI variable above takes effect
+ ::Gitlab::Database::QueryAnalyzer.instance.begin!([described_class])
+ end
+
+ after do
+ ::Gitlab::Database::QueryAnalyzer.instance.end!([described_class])
+ end
+
+ it 'logs queries to a file' do
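+ # The recorder appends each analyzed query as a JSON line; the filesystem calls are stubbed so nothing is written to disk.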
+ allow(FileUtils).to receive(:mkdir_p)
+ .with(File.dirname(log_path))
+ expect(File).to receive(:write)
+ .with(log_path, /^{"sql":"#{query}/, mode: 'a')
+ expect(described_class).to receive(:analyze).with(/^#{query}/).and_call_original
+
+ expect { ApplicationRecord.connection.execute(query) }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb
index 01af9efd782..4f68cd93a8e 100644
--- a/spec/lib/gitlab/database/tables_truncate_spec.rb
+++ b/spec/lib/gitlab/database/tables_truncate_spec.rb
@@ -233,6 +233,26 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
it_behaves_like 'truncating legacy tables on a database'
end
+ context 'when running with multiple shared databases' do
+ before do
+ skip_if_multiple_databases_not_setup
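+ # Simulate a setup in which the ci database shares its underlying database with main.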
+ ci_db_config = Ci::ApplicationRecord.connection_db_config
+ allow(::Gitlab::Database).to receive(:db_config_share_with).with(ci_db_config).and_return('main')
+ end
+
+ it 'raises an error when truncating the main database because it is effectively a single-db setup' do
+ expect do
+ described_class.new(database_name: 'main', min_batch_size: min_batch_size).execute
+ end.to raise_error(/Cannot truncate legacy tables in single-db setup/)
+ end
+
+ it 'raises an error when truncating the ci database because it is effectively a single-db setup' do
+ expect do
+ described_class.new(database_name: 'ci', min_batch_size: min_batch_size).execute
+ end.to raise_error(/Cannot truncate legacy tables in single-db setup/)
+ end
+ end
+
context 'when running in a single database mode' do
before do
skip_if_multiple_databases_are_setup
diff --git a/spec/lib/gitlab/database/type/symbolized_jsonb_spec.rb b/spec/lib/gitlab/database/type/symbolized_jsonb_spec.rb
new file mode 100644
index 00000000000..a8401667b34
--- /dev/null
+++ b/spec/lib/gitlab/database/type/symbolized_jsonb_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Type::SymbolizedJsonb do
+ let(:type) { described_class.new }
+
+ describe '#deserialize' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { type.deserialize(json) }
+
+ where(:json, :value) do
+ nil | nil
+ '{"key":"value"}' | { key: 'value' }
+ '{"key":[1,2,3]}' | { key: [1, 2, 3] }
+ '{"key":{"subkey":"value"}}' | { key: { subkey: 'value' } }
+ '{"key":{"a":[{"b":"c"},{"d":"e"}]}}' | { key: { a: [{ b: 'c' }, { d: 'e' }] } }
+ end
+
+ with_them do
+ it { is_expected.to match(value) }
+ end
+ end
+
+ context 'when used by a model' do
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = :_test_symbolized_jsonb
+
+ attribute :options, :sym_jsonb
+ end
+ end
+
+ let(:record) do
+ model.create!(name: 'test', options: { key: 'value' })
+ end
+
+ before do
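+ # Create a scratch table so the custom :sym_jsonb attribute can be exercised against a real jsonb column.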
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE TABLE _test_symbolized_jsonb(
+ id serial NOT NULL PRIMARY KEY,
+ name text,
+ options jsonb);
+ SQL
+
+ model.reset_column_information
+ end
+
+ it { expect(record.options).to match({ key: 'value' }) }
+
+ it 'ignores changes to other attributes' do
+ record.name = 'other test'
+
+ expect(record.changes).to match('name' => ['test', 'other test'])
+ end
+
+ it 'tracks changes to options' do
+ record.options = { key: 'other value' }
+
+ expect(record.changes).to match('options' => [{ 'key' => 'value' }, { 'key' => 'other value' }])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
index 9d514bcc661..d67e50a50d4 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb
@@ -11,13 +11,13 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService
it 'returns error' do
expect(result).to eq(
status: :error,
- message: 'Self monitoring project does not exist',
+ message: 'Self-monitoring project does not exist',
last_step: :validate_self_monitoring_project_exists
)
end
end
- context 'when self monitoring project exists' do
+ context 'when self-monitoring project exists' do
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index eb42734d044..c788022bd3a 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -242,13 +242,9 @@ RSpec.describe Gitlab::Database do
pool&.disconnect!
end
- context "when there's CI connection", :request_store do
+ context "when there's CI connection" do
before do
skip_if_multiple_databases_not_setup
-
- # FF due to lib/gitlab/database/load_balancing/configuration.rb:92
- # Requires usage of `:request_store`
- stub_feature_flags(force_no_sharing_primary_model: true)
end
context 'when CI uses database_tasks: false to indicate that ci: is a subset of main:' do
diff --git a/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb b/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb
index c24d6a44d9b..02fac96a02f 100644
--- a/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb
@@ -40,7 +40,8 @@ RSpec.describe Gitlab::DependencyLinker::ComposerJsonLinker do
"mockery/mockery": "0.9.*",
"phpunit/phpunit": "~4.0",
"symfony/css-selector": "2.8.*|3.0.*",
- "symfony/dom-crawler": "2.8.*|3.0.*"
+ "symfony/dom-crawler": "2.8.*|3.0.*",
+ "drupal/bootstrap": "3.x-dev"
}
}
CONTENT
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index d623a390dc8..ad2524e40c5 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -55,22 +55,8 @@ RSpec.describe Gitlab::Diff::File do
let(:commit) { project.commit("532c837") }
context 'when file is ipynb' do
- let(:ipynb_semantic_diff) { false }
-
- before do
- stub_feature_flags(ipynb_semantic_diff: ipynb_semantic_diff)
- end
-
- subject { diff_file.rendered }
-
- context 'when ipynb_semantic_diff is off' do
- it { is_expected.to be_nil }
- end
-
- context 'and rendered_viewer is on' do
- let(:ipynb_semantic_diff) { true }
-
- it { is_expected.not_to be_nil }
+ it 'creates a rendered diff file' do
+ expect(diff_file.rendered).not_to be_nil
end
end
end
@@ -152,20 +138,6 @@ RSpec.describe Gitlab::Diff::File do
expect(diff_file.rendered).to be_nil
end
end
-
- context 'when semantic ipynb is off' do
- before do
- stub_feature_flags(ipynb_semantic_diff: false)
- end
-
- it 'returns nil' do
- expect(diff_file).not_to receive(:modified_file?)
- expect(diff_file).not_to receive(:ipynb?)
- expect(diff).not_to receive(:too_large?)
-
- expect(diff_file.rendered).to be_nil
- end
- end
end
end
diff --git a/spec/lib/gitlab/doorkeeper_secret_storing/token/pbkdf2_sha512_spec.rb b/spec/lib/gitlab/doorkeeper_secret_storing/token/pbkdf2_sha512_spec.rb
index c73744cd481..e267d27ed13 100644
--- a/spec/lib/gitlab/doorkeeper_secret_storing/token/pbkdf2_sha512_spec.rb
+++ b/spec/lib/gitlab/doorkeeper_secret_storing/token/pbkdf2_sha512_spec.rb
@@ -10,16 +10,6 @@ RSpec.describe Gitlab::DoorkeeperSecretStoring::Token::Pbkdf2Sha512 do
expect(described_class.transform_secret(plaintext_token))
.to eq("$pbkdf2-sha512$20000$$.c0G5XJVEew1TyeJk5TrkvB0VyOaTmDzPrsdNRED9vVeZlSyuG3G90F0ow23zUCiWKAVwmNnR/ceh.nJG3MdpQ") # rubocop:disable Layout/LineLength
end
-
- context 'when hash_oauth_tokens is disabled' do
- before do
- stub_feature_flags(hash_oauth_tokens: false)
- end
-
- it 'returns a plaintext token' do
- expect(described_class.transform_secret(plaintext_token)).to eq(plaintext_token)
- end
- end
end
describe 'STRETCHES' do
diff --git a/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb b/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
index 2c1badbd113..2bc3cd81b48 100644
--- a/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Email::Handler::UnsubscribeHandler do
stub_config_setting(host: 'localhost')
end
- let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "#{mail_key}#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX}") }
+ let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "#{mail_key}#{Gitlab::Email::Common::UNSUBSCRIBE_SUFFIX}") }
let(:project) { create(:project, :public) }
let(:user) { create(:user) }
let(:noteable) { create(:issue, project: project) }
@@ -21,19 +21,19 @@ RSpec.describe Gitlab::Email::Handler::UnsubscribeHandler do
let(:mail) { Mail::Message.new(email_raw) }
it "matches the new format" do
- handler = described_class.new(mail, "#{mail_key}#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX}")
+ handler = described_class.new(mail, "#{mail_key}#{Gitlab::Email::Common::UNSUBSCRIBE_SUFFIX}")
expect(handler.can_handle?).to be_truthy
end
it "matches the legacy format" do
- handler = described_class.new(mail, "#{mail_key}#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX_LEGACY}")
+ handler = described_class.new(mail, "#{mail_key}#{Gitlab::Email::Common::UNSUBSCRIBE_SUFFIX_LEGACY}")
expect(handler.can_handle?).to be_truthy
end
it "doesn't match either format" do
- handler = described_class.new(mail, "+#{mail_key}#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX}")
+ handler = described_class.new(mail, "+#{mail_key}#{Gitlab::Email::Common::UNSUBSCRIBE_SUFFIX}")
expect(handler.can_handle?).to be_falsey
end
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Email::Handler::UnsubscribeHandler do
end
context 'when using old style unsubscribe link' do
- let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "#{mail_key}#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX_LEGACY}") }
+ let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "#{mail_key}#{Gitlab::Email::Common::UNSUBSCRIBE_SUFFIX_LEGACY}") }
it 'unsubscribes the user from the noteable' do
expect { receiver.execute }.to change { noteable.subscribed?(user) }.from(true).to(false)
diff --git a/spec/lib/gitlab/email/handler_spec.rb b/spec/lib/gitlab/email/handler_spec.rb
index eff6fb63a5f..d38b7d9c85c 100644
--- a/spec/lib/gitlab/email/handler_spec.rb
+++ b/spec/lib/gitlab/email/handler_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::Email::Handler do
describe 'regexps are set properly' do
let(:addresses) do
- %W(sent_notification_key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX} sent_notification_key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX_LEGACY}) +
+ %W(sent_notification_key#{Gitlab::Email::Common::UNSUBSCRIBE_SUFFIX} sent_notification_key#{Gitlab::Email::Common::UNSUBSCRIBE_SUFFIX_LEGACY}) +
%w(sent_notification_key path-to-project-123-user_email_token-merge-request) +
%w(path-to-project-123-user_email_token-issue path-to-project-123-user_email_token-issue-123) +
%w(path/to/project+user_email_token path/to/project+merge-request+user_email_token some/project)
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 79476c63e66..9240d07fd59 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -5,11 +5,10 @@ require 'spec_helper'
RSpec.describe Gitlab::Email::Receiver do
include_context :email_shared_context
+ let_it_be(:project) { create(:project) }
let(:metric_transaction) { instance_double(Gitlab::Metrics::WebTransaction) }
shared_examples 'successful receive' do
- let_it_be(:project) { create(:project) }
-
let(:handler) { double(:handler, project: project, execute: true, metrics_event: nil, metrics_params: nil) }
let(:client_id) { 'email/jake@example.com' }
@@ -39,7 +38,7 @@ RSpec.describe Gitlab::Email::Receiver do
end
end
- shared_examples 'failed receive' do
+ shared_examples 'failed receive with event' do
it 'adds metric event' do
expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
expect(metric_transaction).to receive(:add_event).with('email_receiver_error', { error: expected_error.name })
@@ -48,6 +47,14 @@ RSpec.describe Gitlab::Email::Receiver do
end
end
+ shared_examples 'failed receive without event' do
+ it 'does not add metric event' do
+ expect(::Gitlab::Metrics::BackgroundTransaction).not_to receive(:current)
+
+ expect { receiver.execute }.to raise_error(expected_error)
+ end
+ end
+
context 'when the email contains a valid email address in a header' do
before do
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
@@ -100,21 +107,21 @@ RSpec.describe Gitlab::Email::Receiver do
let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, '!!!') }
let(:expected_error) { Gitlab::Email::UnknownIncomingEmail }
- it_behaves_like 'failed receive'
+ it_behaves_like 'failed receive with event'
end
context 'when the email is blank' do
let(:email_raw) { '' }
let(:expected_error) { Gitlab::Email::EmptyEmailError }
- it_behaves_like 'failed receive'
+ it_behaves_like 'failed receive without event'
end
context 'when the email was auto generated with Auto-Submitted header' do
let(:email_raw) { fixture_file('emails/auto_submitted.eml') }
let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
- it_behaves_like 'failed receive'
+ it_behaves_like 'failed receive without event'
end
context "when the email's To field is blank" do
@@ -164,7 +171,48 @@ RSpec.describe Gitlab::Email::Receiver do
let(:email_raw) { fixture_file('emails/auto_reply.eml') }
let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
- it_behaves_like 'failed receive'
+ it_behaves_like 'failed receive without event'
+ end
+
+ describe 'event raising via errors' do
+ let(:handler) { double(:handler, project: project, execute: true, metrics_event: nil, metrics_params: nil) }
+ let(:email_raw) { "arbitrary text. could be anything really. we're going to raise an error anyway." }
+
+ before do
+ allow(receiver).to receive(:handler).and_return(handler)
+ allow(handler).to receive(:execute).and_raise(expected_error)
+ end
+
+ describe 'handling errors which do not raise events' do
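+ # These errors are expected user-facing failures and should not emit an error-tracking event.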
+ where(:expected_error) do
+ [
+ Gitlab::Email::AutoGeneratedEmailError,
+ Gitlab::Email::ProjectNotFound,
+ Gitlab::Email::EmptyEmailError,
+ Gitlab::Email::UserNotFoundError,
+ Gitlab::Email::UserBlockedError,
+ Gitlab::Email::UserNotAuthorizedError,
+ Gitlab::Email::NoteableNotFoundError,
+ Gitlab::Email::InvalidAttachment,
+ Gitlab::Email::InvalidRecordError,
+ Gitlab::Email::EmailTooLarge
+ ]
+ end
+
+ with_them do
+ it_behaves_like 'failed receive without event'
+ end
+ end
+
+ describe 'handling errors which do raise events' do
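+ # Unexpected or unparsable errors should still be reported through the metric event.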
+ where(:expected_error) do
+ [Gitlab::Email::EmailUnparsableError, Gitlab::Email::UnknownIncomingEmail, ArgumentError, StandardError]
+ end
+
+ with_them do
+ it_behaves_like 'failed receive with event'
+ end
+ end
end
it 'requires all handlers to have a unique metric_event' do
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index fd859ae40fb..4900547e9e9 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -369,6 +369,25 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
+ context 'when exception is excluded' do
+ before do
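+ # Define a subclass at runtime to verify that subclasses of RetryError are excluded as well.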
+ stub_const('SubclassRetryError', Class.new(Gitlab::SidekiqMiddleware::RetryError))
+ end
+
+ ['Gitlab::SidekiqMiddleware::RetryError', 'SubclassRetryError'].each do |ex|
+ context "with #{ex}" do
+ let(:exception) { ex.constantize.new }
+
+ it "does not report #{ex} exception to Sentry" do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error)
+
+ track_exception
+
+ expect(Raven.client.transport.events).to eq([])
+ expect(Sentry.get_current_client.transport.events).to eq([])
+ end
+ end
+ end
+ end
+
context 'when processing invalid URI exceptions' do
let(:invalid_uri) { 'http://foo:bar' }
let(:raven_exception_values) { raven_event['exception']['values'] }
diff --git a/spec/lib/gitlab/experimentation/group_types_spec.rb b/spec/lib/gitlab/experimentation/group_types_spec.rb
deleted file mode 100644
index 2b118d76fa4..00000000000
--- a/spec/lib/gitlab/experimentation/group_types_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-
-RSpec.describe Gitlab::Experimentation::GroupTypes do
- it 'defines a GROUP_CONTROL constant' do
- expect(described_class.const_defined?(:GROUP_CONTROL)).to be_truthy
- end
-
- it 'defines a GROUP_EXPERIMENTAL constant' do
- expect(described_class.const_defined?(:GROUP_EXPERIMENTAL)).to be_truthy
- end
-end
diff --git a/spec/lib/gitlab/feature_categories_spec.rb b/spec/lib/gitlab/feature_categories_spec.rb
index 477da900d0a..a35166a4499 100644
--- a/spec/lib/gitlab/feature_categories_spec.rb
+++ b/spec/lib/gitlab/feature_categories_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::FeatureCategories do
let(:fake_categories) { %w(foo bar) }
- subject { described_class.new(fake_categories) }
+ subject(:feature_categories) { described_class.new(fake_categories) }
describe "#valid?" do
it "returns true if category is known", :aggregate_failures do
@@ -14,6 +14,28 @@ RSpec.describe Gitlab::FeatureCategories do
end
end
+ describe '#get!' do
+ subject { feature_categories.get!(category) }
+
+ let(:category) { 'foo' }
+
+ it { is_expected.to eq('foo') }
+
+ context 'when category does not exist' do
+ let(:category) { 'zzz' }
+
+ it { expect { subject }.to raise_error(RuntimeError) }
+
+ context 'when on production' do
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ end
+
+ it { is_expected.to eq('unknown') }
+ end
+ end
+ end
+
describe "#from_request" do
let(:request_env) { {} }
let(:verified) { true }
diff --git a/spec/lib/gitlab/git/object_pool_spec.rb b/spec/lib/gitlab/git/object_pool_spec.rb
index 3b1eb0319f8..b158c7227d4 100644
--- a/spec/lib/gitlab/git/object_pool_spec.rb
+++ b/spec/lib/gitlab/git/object_pool_spec.rb
@@ -78,44 +78,40 @@ RSpec.describe Gitlab::Git::ObjectPool do
end
describe '#fetch' do
- let(:commit_count) { source_repository.commit_count }
+ context 'when the object pool repository exists' do
+ let!(:pool_repository) { create(:pool_repository, :ready) }
- context "when the object's pool repository exists" do
- it 'does not raise an error' do
- expect { subject.fetch }.not_to raise_error
+ context 'without changes' do
+ it 'does not raise an error' do
+ expect { subject.fetch }.not_to raise_error
+ end
end
- end
-
- context "when the object's pool repository does not exist" do
- before do
- subject.delete
- end
-
- it "re-creates the object pool's repository" do
- subject.fetch
-
- expect(subject.repository.exists?).to be true
- end
-
- it 'does not raise an error' do
- expect { subject.fetch }.not_to raise_error
- end
-
- it 'fetches objects from the source repository' do
- new_commit_id = source_repository.create_file(
- pool_repository.source_project.owner,
- 'a.file',
- 'This is a file',
- branch_name: source_repository.root_ref,
- message: 'Add a file'
- )
-
- expect(subject.repository.exists?).to be false
-
- subject.fetch
- expect(subject.repository.commit_count('refs/remotes/origin/heads/master')).to eq(commit_count)
- expect(subject.repository.commit(new_commit_id).id).to eq(new_commit_id)
+ context 'with new commit in source repository' do
+ let(:branch_name) { Gitlab::Git::Ref.extract_branch_name(source_repository.root_ref) }
+ let(:source_ref_name) { "refs/heads/#{branch_name}" }
+ let(:pool_ref_name) { "refs/remotes/origin/heads/#{branch_name}" }
+
+ let(:new_commit_id) do
+ source_repository.create_file(
+ pool_repository.source_project.owner,
+ 'a.file',
+ 'This is a file',
+ branch_name: branch_name,
+ message: 'Add a file'
+ )
+ end
+
+ it 'fetches objects from the source repository' do
+ # Sanity-check that the commit does not yet exist in the pool repository.
+ expect(subject.repository.commit(new_commit_id)).to be_nil
+
+ subject.fetch
+
+ expect(subject.repository.commit(pool_ref_name).id).to eq(new_commit_id)
+ expect(subject.repository.commit_count(pool_ref_name))
+ .to eq(source_repository.raw_repository.commit_count(source_ref_name))
+ end
end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index f3d3fd2034c..5e27979cbf3 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -461,11 +461,7 @@ RSpec.describe Gitlab::Git::Repository do
end
it 'raises an error if it failed' do
- # TODO: Once https://gitlab.com/gitlab-org/gitaly/-/merge_requests/4921
- # is merged, remove the assertion for Gitlab::Git::Repository::GitError
- expect { repository.delete_refs('refs\heads\fix') }.to raise_error do |e|
- expect(e).to be_a(Gitlab::Git::Repository::GitError).or be_a(Gitlab::Git::InvalidRefFormatError)
- end
+ expect { repository.delete_refs('refs\heads\fix') }.to raise_error(Gitlab::Git::InvalidRefFormatError)
end
end
@@ -940,10 +936,8 @@ RSpec.describe Gitlab::Git::Repository do
let(:options) { { ref: 'master', path: ['PROCESS.md', 'README.md'] } }
def commit_files(commit)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- commit.deltas.flat_map do |delta|
- [delta.old_path, delta.new_path].uniq.compact
- end
+ commit.deltas.flat_map do |delta|
+ [delta.old_path, delta.new_path].uniq.compact
end
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 7c84c737c00..17f802b9f66 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -239,7 +239,7 @@ RSpec.describe Gitlab::Git::Tree do
let(:pagination_params) { { limit: 5, page_token: 'aabbccdd' } }
it 'raises a command error' do
- expect { entries }.to raise_error(Gitlab::Git::CommandError, 'could not find starting OID: aabbccdd')
+ expect { entries }.to raise_error(Gitlab::Git::CommandError, /could not find starting OID: aabbccdd/)
end
end
diff --git a/spec/lib/gitlab/git_ref_validator_spec.rb b/spec/lib/gitlab/git_ref_validator_spec.rb
index 6938ad51189..03dd4e7b89b 100644
--- a/spec/lib/gitlab/git_ref_validator_spec.rb
+++ b/spec/lib/gitlab/git_ref_validator_spec.rb
@@ -35,6 +35,8 @@ RSpec.describe Gitlab::GitRefValidator do
it { expect(described_class.validate('.tag')).to be false }
it { expect(described_class.validate('my branch')).to be false }
it { expect(described_class.validate("\xA0\u0000\xB0")).to be false }
+ it { expect(described_class.validate("")).to be false }
+ it { expect(described_class.validate(nil)).to be false }
end
describe '.validate_merge_request_branch' do
@@ -67,5 +69,7 @@ RSpec.describe Gitlab::GitRefValidator do
it { expect(described_class.validate_merge_request_branch('.tag')).to be false }
it { expect(described_class.validate_merge_request_branch('my branch')).to be false }
it { expect(described_class.validate_merge_request_branch("\xA0\u0000\xB0")).to be false }
+ it { expect(described_class.validate_merge_request_branch("")).to be false }
+ it { expect(described_class.validate_merge_request_branch(nil)).to be false }
end
end
diff --git a/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb b/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
index 9c3bc935acc..baf7076c718 100644
--- a/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::ObjectPoolService do
let(:pool_repository) { create(:pool_repository) }
- let(:project) { create(:project, :repository) }
+ let(:project) { pool_repository.source_project }
let(:raw_repository) { project.repository.raw }
let(:object_pool) { pool_repository.object_pool }
@@ -45,21 +45,32 @@ RSpec.describe Gitlab::GitalyClient::ObjectPoolService do
end
describe '#fetch' do
- before do
- subject.delete
+ context 'without changes' do
+ it 'returns an empty response' do
+ expect(subject.fetch(project.repository)).to eq(Gitaly::FetchIntoObjectPoolResponse.new)
+ end
end
- it 'restores the pool repository objects' do
- subject.fetch(project.repository)
+ context 'with new reference in source repository' do
+ let(:branch) { 'ref-to-be-fetched' }
+ let(:source_ref) { "refs/heads/#{branch}" }
+ let(:pool_ref) { "refs/remotes/origin/heads/#{branch}" }
- expect(object_pool.repository.exists?).to be(true)
- end
+ before do
+ # Create a new reference in the source repository that we can fetch.
+ project.repository.write_ref(source_ref, 'refs/heads/master')
+ end
- context 'when called twice' do
- it "doesn't raise an error" do
- subject.delete
+ it 'fetches changes' do
+ # Sanity-check to verify that the reference only exists in the source repository now, but not in the
+ # object pool.
+ expect(project.repository.ref_exists?(source_ref)).to be(true)
+ expect(object_pool.repository.ref_exists?(pool_ref)).to be(false)
+
+ subject.fetch(project.repository)
- expect { subject.fetch(project.repository) }.not_to raise_error
+ # The fetch should've created the reference in the object pool.
+ expect(object_pool.repository.ref_exists?(pool_ref)).to be(true)
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 7e8aaa3cdf4..604feeea325 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -830,32 +830,225 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
'master', repository)
end
- before do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
- .and_return(response)
- end
+ context 'with unstructured errors' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
+ .and_return(response)
+ end
- context 'when a pre_receive_error is present' do
- let(:response) { Gitaly::UserCommitFilesResponse.new(pre_receive_error: "GitLab: something failed") }
+ context 'when a pre_receive_error is present' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new(pre_receive_error: "GitLab: something failed") }
- it 'raises a PreReceiveError' do
- expect { subject }.to raise_error(Gitlab::Git::PreReceiveError, "something failed")
+ it 'raises a PreReceiveError' do
+ expect { subject }.to raise_error(Gitlab::Git::PreReceiveError, "something failed")
+ end
end
- end
- context 'when an index_error is present' do
- let(:response) { Gitaly::UserCommitFilesResponse.new(index_error: "something failed") }
+ context 'when an index_error is present' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new(index_error: "something failed") }
- it 'raises a PreReceiveError' do
- expect { subject }.to raise_error(Gitlab::Git::Index::IndexError, "something failed")
+ it 'raises an IndexError' do
+ expect { subject }.to raise_error(Gitlab::Git::Index::IndexError, "something failed")
+ end
+ end
+
+ context 'when branch_update is nil' do
+ let(:response) { Gitaly::UserCommitFilesResponse.new }
+
+ it { expect(subject).to be_nil }
end
end
- context 'when branch_update is nil' do
- let(:response) { Gitaly::UserCommitFilesResponse.new }
+ context 'with structured errors' do
+ context 'with AccessCheckError' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(raised_error)
+ end
- it { expect(subject).to be_nil }
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ "error updating file",
+ Gitaly::UserCommitFilesError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: "something went wrong"
+ )))
+ end
+
+ it 'raises a PreReceiveError' do
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq("something went wrong")
+ end
+ end
+ end
+
+ context 'with IndexError' do
+ let(:status_code) { nil }
+ let(:expected_error) { nil }
+
+ let(:structured_error) do
+ new_detailed_error(
+ status_code,
+ "unused error message",
+ expected_error)
+ end
+
+ shared_examples '#user_commit_files failure' do
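+ # Callers provide status_code, expected_error and expected_message for each IndexError variant.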
+ it 'raises an IndexError' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(structured_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::Index::IndexError)
+ expect(error.message).to eq(expected_message)
+ end
+ end
+ end
+
+ context 'with missing file' do
+ let(:status_code) { GRPC::Core::StatusCodes::NOT_FOUND }
+ let(:expected_message) { "File not found: README.md" }
+ let(:expected_error) do
+ Gitaly::UserCommitFilesError.new(
+ index_update: Gitaly::IndexError.new(
+ path: "README.md",
+ error_type: Gitaly::IndexError::ErrorType::ERROR_TYPE_FILE_NOT_FOUND
+ ))
+ end
+
+ it_behaves_like '#user_commit_files failure'
+ end
+
+ context 'with existing directory' do
+ let(:status_code) { GRPC::Core::StatusCodes::ALREADY_EXISTS }
+ let(:expected_message) { "Directory already exists: dir1" }
+ let(:expected_error) do
+ Gitaly::UserCommitFilesError.new(
+ index_update: Gitaly::IndexError.new(
+ path: "dir1",
+ error_type: Gitaly::IndexError::ErrorType::ERROR_TYPE_DIRECTORY_EXISTS
+ ))
+ end
+
+ it_behaves_like '#user_commit_files failure'
+ end
+
+ context 'with existing file' do
+ let(:status_code) { GRPC::Core::StatusCodes::ALREADY_EXISTS }
+ let(:expected_message) { "File already exists: README.md" }
+ let(:expected_error) do
+ Gitaly::UserCommitFilesError.new(
+ index_update: Gitaly::IndexError.new(
+ path: "README.md",
+ error_type: Gitaly::IndexError::ErrorType::ERROR_TYPE_FILE_EXISTS
+ ))
+ end
+
+ it_behaves_like '#user_commit_files failure'
+ end
+
+ context 'with invalid path' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Invalid path: invalid://file/name" }
+ let(:expected_error) do
+ Gitaly::UserCommitFilesError.new(
+ index_update: Gitaly::IndexError.new(
+ path: "invalid://file/name",
+ error_type: Gitaly::IndexError::ErrorType::ERROR_TYPE_INVALID_PATH
+ ))
+ end
+
+ it_behaves_like '#user_commit_files failure'
+ end
+
+ context 'with directory traversal' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Directory traversal in path escapes repository: ../../../../etc/shadow" }
+ let(:expected_error) do
+ Gitaly::UserCommitFilesError.new(
+ index_update: Gitaly::IndexError.new(
+ path: "../../../../etc/shadow",
+ error_type: Gitaly::IndexError::ErrorType::ERROR_TYPE_DIRECTORY_TRAVERSAL
+ ))
+ end
+
+ it_behaves_like '#user_commit_files failure'
+ end
+
+ context 'with empty path' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Received empty path" }
+ let(:expected_error) do
+ Gitaly::UserCommitFilesError.new(
+ index_update: Gitaly::IndexError.new(
+ path: "",
+ error_type: Gitaly::IndexError::ErrorType::ERROR_TYPE_EMPTY_PATH
+ ))
+ end
+
+ it_behaves_like '#user_commit_files failure'
+ end
+
+ context 'with unspecified error' do
+ let(:status_code) { GRPC::Core::StatusCodes::INVALID_ARGUMENT }
+ let(:expected_message) { "Unknown error performing git operation" }
+ let(:expected_error) do
+ Gitaly::UserCommitFilesError.new(
+ index_update: Gitaly::IndexError.new(
+ path: "",
+ error_type: Gitaly::IndexError::ErrorType::ERROR_TYPE_UNSPECIFIED
+ ))
+ end
+
+ it_behaves_like '#user_commit_files failure'
+ end
+
+ context 'with an exception without the detailed error' do
+ let(:permission_error) do
+ GRPC::PermissionDenied.new
+ end
+
+ it 'raises PermissionDenied' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(permission_error)
+
+ expect { subject }.to raise_error(GRPC::PermissionDenied)
+ end
+ end
+ end
+
+ context 'with CustomHookError' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_commit_files).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(raised_error)
+ end
+
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ "error updating file",
+ Gitaly::UserCommitFilesError.new(
+ custom_hook: Gitaly::CustomHookError.new(
+ stdout: "some stdout",
+ stderr: "GitLab: some custom hook error message",
+ hook_type: Gitaly::CustomHookError::HookType::HOOK_TYPE_PRERECEIVE
+ )))
+ end
+
+ it 'raises a PreReceiveError' do
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq("some custom hook error message")
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 5ce88b06241..bd96e9baf1d 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::RefService do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository, create_tag: 'test') }
let(:storage_name) { project.repository_storage }
let(:relative_path) { project.disk_path + '.git' }
@@ -438,12 +438,28 @@ RSpec.describe Gitlab::GitalyClient::RefService do
it 'sends a find_refs_by_oid message' do
expect_any_instance_of(Gitaly::RefService::Stub)
.to receive(:find_refs_by_oid)
- .with(gitaly_request_with_params(sort_field: 'refname', oid: oid, limit: 1), kind_of(Hash))
+ .with(gitaly_request_with_params(sort_field: 'refname',
+ oid: oid,
+ limit: 1), kind_of(Hash))
.and_call_original
refs = client.find_refs_by_oid(oid: oid, limit: 1)
expect(refs.to_a).to eq([Gitlab::Git::BRANCH_REF_PREFIX + project.repository.root_ref])
end
+
+ it 'filters by ref_patterns' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_refs_by_oid)
+ .with(gitaly_request_with_params(sort_field: 'refname',
+ oid: oid,
+ limit: 1,
+ ref_patterns: [Gitlab::Git::TAG_REF_PREFIX]), kind_of(Hash))
+ .and_call_original
+
+ refs = client.find_refs_by_oid(oid: oid, limit: 1, ref_patterns: [Gitlab::Git::TAG_REF_PREFIX])
+
+ expect(refs.to_a).to eq([Gitlab::Git::TAG_REF_PREFIX + 'test'])
+ end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb b/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb
new file mode 100644
index 00000000000..41dce5d76dd
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb
@@ -0,0 +1,275 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do
+ let(:user) { create(:user) }
+ let(:service) do
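+ # Anonymous class mixing in the module under test so its helpers can be exercised in isolation.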
+ Class.new do
+ include Gitlab::GitalyClient::WithFeatureFlagActors
+ end.new
+ end
+
+ describe '#user_actor' do
+ context 'when user is not available in ApplicationContext' do
+ it 'returns nil' do
+ expect(service.user_actor).to be(nil)
+ end
+ end
+
+ context 'when user is available in ApplicationContext' do
+ around do |example|
+ ::Gitlab::ApplicationContext.with_context(user: user) { example.run }
+ end
+
+ it 'returns corresponding user record' do
+ expect(service.user_actor.flipper_id).to eql(user.flipper_id)
+ end
+ end
+
+ context 'when user does not exist' do
+ around do |example|
+ ::Gitlab::ApplicationContext.with_context(user: SecureRandom.uuid) { example.run }
+ end
+
+ it 'returns corresponding user record' do
+ expect(service.user_actor).to be(nil)
+ end
+ end
+ end
+
+ describe '#repository, #project_actor, #group_actor' do
+ context 'when normal project repository' do
+ let_it_be(:project) { create(:project, group: create(:group)) }
+ let(:expected_project) { project }
+ let(:expected_group) { Feature::Gitaly::ActorWrapper.new(::Group, project.group.id) }
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { project.repository }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { project.repository.raw }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { raw_repo_without_container(project.repository) }
+ end
+ end
+
+ context 'when project wiki repository' do
+ let_it_be(:project) { create(:project, :wiki_repo, group: create(:group)) }
+ let(:expected_project) { nil }
+ let(:expected_group) { nil }
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { project.wiki.repository }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { project.wiki.repository.raw }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { raw_repo_without_container(project.wiki.repository) }
+ end
+ end
+
+ context 'when repository of project in user namespace' do
+ let_it_be(:project) { create(:project, namespace: create(:user).namespace) }
+ let(:expected_project) { project }
+ let(:expected_group) { Feature::Gitaly::ActorWrapper.new(::Group, project.namespace_id) }
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { project.repository }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { project.repository.raw }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { raw_repo_without_container(project.repository) }
+ end
+ end
+
+ context 'when personal snippet' do
+ let(:snippet) { create(:personal_snippet) }
+ let(:expected_project) { nil }
+ let(:expected_group) { nil }
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { snippet.repository }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { snippet.repository.raw }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { raw_repo_without_container(snippet.repository) }
+ end
+ end
+
+ context 'when project snippet' do
+ let_it_be(:project) { create(:project, group: create(:group)) }
+ let(:snippet) { create(:project_snippet, project: project) }
+ let(:expected_project) { nil }
+ let(:expected_group) { nil }
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { snippet.repository }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { snippet.repository.raw }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { raw_repo_without_container(snippet.repository) }
+ end
+ end
+
+ context 'when project design' do
+ let_it_be(:project) { create(:project, group: create(:group)) }
+ let(:issue) { create(:issue, project: project) }
+ let(:design) { create(:design, issue: issue) }
+
+ let(:expected_project) { project }
+ let(:expected_group) { project.group }
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { design.repository }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { design.repository.raw }
+ end
+
+ it_behaves_like 'Gitaly feature flag actors are inferred from repository' do
+ let(:repository) { raw_repo_without_container(design.repository) }
+ end
+ end
+ end
+
+ describe '#gitaly_client_call' do
+ let(:call_arg_1) { double }
+ let(:call_arg_2) { double }
+ let(:call_arg_3) { double }
+ let(:call_result) { double }
+
+ before do
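+ # Stub the underlying GitalyClient.call so no real RPC is issued.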
+ allow(Gitlab::GitalyClient).to receive(:call).and_return(call_result)
+ end
+
+ context 'when actors_aware_gitaly_calls flag is enabled' do
+ let(:repository_actor) { instance_double(::Repository) }
+ let(:user_actor) { instance_double(::User) }
+ let(:project_actor) { instance_double(Project) }
+ let(:group_actor) { instance_double(Group) }
+
+ before do
+ stub_feature_flags(actors_aware_gitaly_calls: true)
+
+ allow(service).to receive(:user_actor).and_return(user_actor)
+ allow(service).to receive(:repository_actor).and_return(repository_actor)
+ allow(service).to receive(:project_actor).and_return(project_actor)
+ allow(service).to receive(:group_actor).and_return(group_actor)
+ allow(Gitlab::GitalyClient).to receive(:with_feature_flag_actors).and_call_original
+ end
+
+ it 'triggers client call with feature flag actors' do
+ result = service.gitaly_client_call(call_arg_1, call_arg_2, karg: call_arg_3)
+
+ expect(Gitlab::GitalyClient).to have_received(:call).with(call_arg_1, call_arg_2, karg: call_arg_3)
+ expect(Gitlab::GitalyClient).to have_received(:with_feature_flag_actors).with(
+ repository: repository_actor,
+ user: user_actor,
+ project: project_actor,
+ group: group_actor
+ )
+ expect(result).to be(call_result)
+ end
+
+ context 'when called without repository_actor' do
+ before do
+ allow(service).to receive(:repository_actor).and_return(nil)
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
+ end
+
+ it 'calls error tracking track_and_raise_for_dev_exception' do
+ expect do
+ service.gitaly_client_call(call_arg_1, call_arg_2, karg: call_arg_3)
+ end.to raise_error(/gitaly_client_call called without setting repository_actor/)
+
+ expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(
+ be_a(Feature::InvalidFeatureFlagError)
+ )
+ end
+ end
+ end
+
+ context 'when actors_aware_gitaly_calls flag is not enabled' do
+ before do
+ stub_feature_flags(actors_aware_gitaly_calls: false)
+ end
+
+ it 'triggers client call without feature flag actors' do
+ expect(Gitlab::GitalyClient).not_to receive(:with_feature_flag_actors)
+
+ result = service.gitaly_client_call(call_arg_1, call_arg_2, karg: call_arg_3)
+
+ expect(Gitlab::GitalyClient).to have_received(:call).with(call_arg_1, call_arg_2, karg: call_arg_3)
+ expect(result).to be(call_result)
+ end
+ end
+
+ describe '#gitaly_feature_flag_actors' do
+ let_it_be(:project) { create(:project) }
+ let(:repository_actor) { project.repository }
+
+ context 'when actors_aware_gitaly_calls flag is enabled' do
+ let(:user_actor) { instance_double(::User) }
+ let(:project_actor) { instance_double(Project) }
+ let(:group_actor) { instance_double(Group) }
+
+ before do
+ stub_feature_flags(actors_aware_gitaly_calls: true)
+
+ allow(Feature::Gitaly).to receive(:user_actor).and_return(user_actor)
+ allow(Feature::Gitaly).to receive(:project_actor).with(project).and_return(project_actor)
+ allow(Feature::Gitaly).to receive(:group_actor).with(project).and_return(group_actor)
+ end
+
+ it 'returns a hash with collected feature flag actors' do
+ result = service.gitaly_feature_flag_actors(repository_actor)
+ expect(result).to eql(
+ repository: repository_actor,
+ user: user_actor,
+ project: project_actor,
+ group: group_actor
+ )
+
+ expect(Feature::Gitaly).to have_received(:user_actor).with(no_args)
+ expect(Feature::Gitaly).to have_received(:project_actor).with(project)
+ expect(Feature::Gitaly).to have_received(:group_actor).with(project)
+ end
+ end
+
+ context 'when actors_aware_gitaly_calls flag is not enabled' do
+ before do
+ stub_feature_flags(actors_aware_gitaly_calls: false)
+ end
+
+ it 'returns an empty hash' do
+ expect(Feature::Gitaly).not_to receive(:user_actor)
+ expect(Feature::Gitaly).not_to receive(:project_actor)
+ expect(Feature::Gitaly).not_to receive(:group_actor)
+
+ result = service.gitaly_feature_flag_actors(repository_actor)
+ expect(result).to eql({})
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index a3840ca843f..3d33bf93c23 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -259,6 +259,102 @@ RSpec.describe Gitlab::GitalyClient do
end
end
+ shared_examples 'gitaly feature flags in metadata' do
+ before do
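+ # Stub server-side feature flag evaluation so the metadata assertions are deterministic.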
+ allow(Feature::Gitaly).to receive(:server_feature_flags).and_return(
+ 'gitaly-feature-a' => 'true',
+ 'gitaly-feature-b' => 'false'
+ )
+ end
+
+ it 'evaluates Gitaly server feature flags' do
+ metadata = described_class.request_kwargs('default', timeout: 1)[:metadata]
+
+ expect(Feature::Gitaly).to have_received(:server_feature_flags).with(no_args)
+ expect(metadata['gitaly-feature-a']).to be('true')
+ expect(metadata['gitaly-feature-b']).to be('false')
+ end
+
+ context 'when there are actors' do
+ let(:repository_actor) { double(:actor) }
+ let(:project_actor) { double(:actor) }
+ let(:user_actor) { double(:actor) }
+ let(:group_actor) { double(:actor) }
+
+ it 'evaluates Gitaly server feature flags with actors' do
+ metadata = described_class.with_feature_flag_actors(
+ repository: repository_actor,
+ project: project_actor,
+ user: user_actor,
+ group: group_actor
+ ) do
+ described_class.request_kwargs('default', timeout: 1)[:metadata]
+ end
+
+ expect(Feature::Gitaly).to have_received(:server_feature_flags).with(
+ repository: repository_actor,
+ project: project_actor,
+ user: user_actor,
+ group: group_actor
+ )
+ expect(metadata['gitaly-feature-a']).to be('true')
+ expect(metadata['gitaly-feature-b']).to be('false')
+ end
+ end
+ end
+
+ context 'server_feature_flags when RequestStore is activated', :request_store do
+ it_behaves_like 'gitaly feature flags in metadata'
+ end
+
+ context 'server_feature_flags when RequestStore is not activated' do
+ it_behaves_like 'gitaly feature flags in metadata'
+ end
+
+ context 'logging information in metadata' do
+ let(:user) { create(:user) }
+
+ context 'user is added to application context' do
+ it 'injects username and user_id into gRPC metadata' do
+ metadata = {}
+ ::Gitlab::ApplicationContext.with_context(user: user) do
+ metadata = described_class.request_kwargs('default', timeout: 1)[:metadata]
+ end
+
+ expect(metadata['username']).to eql(user.username)
+ expect(metadata['user_id']).to eql(user.id.to_s)
+ end
+ end
+
+ context 'user is not added to application context' do
+ it 'does not inject username and user_id into gRPC metadata' do
+ metadata = described_class.request_kwargs('default', timeout: 1)[:metadata]
+
+ expect(metadata).not_to have_key('username')
+ expect(metadata).not_to have_key('user_id')
+ end
+ end
+
+ context 'remote_ip is added to application context' do
+ it 'injects remote_ip into gRPC metadata' do
+ metadata = {}
+ ::Gitlab::ApplicationContext.with_context(remote_ip: '1.2.3.4') do
+ metadata = described_class.request_kwargs('default', timeout: 1)[:metadata]
+ end
+
+ expect(metadata['remote_ip']).to eql('1.2.3.4')
+ end
+ end
+
+ context 'remote_ip is not added to application context' do
+ it 'does not inject remote_ip into gRPC metadata' do
+ metadata = described_class.request_kwargs('default', timeout: 1)[:metadata]
+
+ expect(metadata).not_to have_key('remote_ip')
+ end
+ end
+ end
+
context 'gitlab_git_env' do
let(:policy) { 'gitaly-route-repository-accessor-policy' }
@@ -585,4 +681,42 @@ RSpec.describe Gitlab::GitalyClient do
end
end
end
+
+ describe '.with_feature_flag_actors' do
+ shared_examples 'with_feature_flag_actor' do
+ let(:repository_actor) { double(:actor) }
+ let(:project_actor) { double(:actor) }
+ let(:user_actor) { double(:actor) }
+ let(:group_actor) { double(:actor) }
+
+ it 'allows access to feature flag actors inside the block' do
+ expect(described_class.feature_flag_actors).to eql({})
+
+ described_class.with_feature_flag_actors(
+ repository: repository_actor,
+ project: project_actor,
+ user: user_actor,
+ group: group_actor
+ ) do
+ expect(
+ described_class.feature_flag_actors
+ ).to eql(
+ repository: repository_actor,
+ project: project_actor,
+ user: user_actor,
+ group: group_actor)
+ end
+
+ expect(described_class.feature_flag_actors).to eql({})
+ end
+ end
+
+ context 'when RequestStore is activated', :request_store do
+ it_behaves_like 'with_feature_flag_actor'
+ end
+
+ context 'when RequestStore is not activated' do
+ it_behaves_like 'with_feature_flag_actor'
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
index 57391e06192..dc9f939a19b 100644
--- a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
+++ b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader do
let_it_be(:content_type) { 'application/octet-stream' }
let(:content_length) { 1000 }
- let(:chunk_double) { instance_double(HTTParty::FragmentWithResponse, code: 200) }
+ let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 200) }
let(:headers_double) do
instance_double(
HTTParty::Response,
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 3361b039a27..95f7933fbc5 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -3,24 +3,24 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Client do
- describe '#parallel?' do
- it 'returns true when the client is running in parallel mode' do
- client = described_class.new('foo', parallel: true)
+ subject(:client) { described_class.new('foo', parallel: parallel) }
+
+ let(:parallel) { true }
- expect(client).to be_parallel
+ describe '#parallel?' do
+ context 'when the client is running in parallel mode' do
+ it { expect(client).to be_parallel }
end
- it 'returns false when the client is running in sequential mode' do
- client = described_class.new('foo', parallel: false)
+ context 'when the client is running in sequential mode' do
+ let(:parallel) { false }
- expect(client).not_to be_parallel
+ it { expect(client).not_to be_parallel }
end
end
describe '#user' do
it 'returns the details for the given username' do
- client = described_class.new('foo')
-
expect(client.octokit).to receive(:user).with('foo')
expect(client).to receive(:with_rate_limit).and_yield
@@ -30,8 +30,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#pull_request_reviews' do
it 'returns the pull request reviews' do
- client = described_class.new('foo')
-
expect(client)
.to receive(:each_object)
.with(:pull_request_reviews, 'foo/bar', 999)
@@ -40,10 +38,17 @@ RSpec.describe Gitlab::GithubImport::Client do
end
end
+ describe '#pull_request_review_requests' do
+ it 'returns the pull request review requests' do
+ expect(client.octokit).to receive(:pull_request_review_requests).with('foo/bar', 999)
+ expect(client).to receive(:with_rate_limit).and_yield
+
+ client.pull_request_review_requests('foo/bar', 999)
+ end
+ end
+
describe '#repos' do
it 'returns the user\'s repositories as a hash' do
- client = described_class.new('foo')
-
stub_request(:get, 'https://api.github.com/rate_limit')
.to_return(status: 200, headers: { 'X-RateLimit-Limit' => 5000, 'X-RateLimit-Remaining' => 5000 })
@@ -58,8 +63,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#repository' do
it 'returns the details of a repository' do
- client = described_class.new('foo')
-
expect(client.octokit).to receive(:repo).with('foo/bar')
expect(client).to receive(:with_rate_limit).and_yield
@@ -67,8 +70,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
it 'returns repository data as a hash' do
- client = described_class.new('foo')
-
stub_request(:get, 'https://api.github.com/rate_limit')
.to_return(status: 200, headers: { 'X-RateLimit-Limit' => 5000, 'X-RateLimit-Remaining' => 5000 })
@@ -83,8 +84,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#pull_request' do
it 'returns the details of a pull_request' do
- client = described_class.new('foo')
-
expect(client.octokit).to receive(:pull_request).with('foo/bar', 999)
expect(client).to receive(:with_rate_limit).and_yield
@@ -94,8 +93,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#labels' do
it 'returns the labels' do
- client = described_class.new('foo')
-
expect(client)
.to receive(:each_object)
.with(:labels, 'foo/bar')
@@ -106,8 +103,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#milestones' do
it 'returns the milestones' do
- client = described_class.new('foo')
-
expect(client)
.to receive(:each_object)
.with(:milestones, 'foo/bar')
@@ -118,8 +113,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#releases' do
it 'returns the releases' do
- client = described_class.new('foo')
-
expect(client)
.to receive(:each_object)
.with(:releases, 'foo/bar')
@@ -130,8 +123,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#branches' do
it 'returns the branches' do
- client = described_class.new('foo')
-
expect(client)
.to receive(:each_object)
.with(:branches, 'foo/bar')
@@ -142,8 +133,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#branch_protection' do
it 'returns the protection details for the given branch' do
- client = described_class.new('foo')
-
expect(client.octokit)
.to receive(:branch_protection).with('org/repo', 'bar')
expect(client).to receive(:with_rate_limit).and_yield
@@ -156,8 +145,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#each_object' do
it 'converts each object into a hash' do
- client = described_class.new('foo')
-
stub_request(:get, 'https://api.github.com/rate_limit')
.to_return(status: 200, headers: { 'X-RateLimit-Limit' => 5000, 'X-RateLimit-Remaining' => 5000 })
@@ -171,7 +158,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#each_page' do
- let(:client) { described_class.new('foo') }
let(:object1) { double(:object1) }
let(:object2) { double(:object2) }
@@ -242,8 +228,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#with_rate_limit' do
- let(:client) { described_class.new('foo') }
-
it 'yields the supplied block when enough requests remain' do
expect(client).to receive(:requests_remaining?).and_return(true)
@@ -340,8 +324,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#requests_remaining?' do
- let(:client) { described_class.new('foo') }
-
context 'when default requests limit is set' do
before do
allow(client).to receive(:requests_limit).and_return(5000)
@@ -380,44 +362,43 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#raise_or_wait_for_rate_limit' do
- it 'raises RateLimitError when running in parallel mode' do
- client = described_class.new('foo', parallel: true)
-
- expect { client.raise_or_wait_for_rate_limit }
- .to raise_error(Gitlab::GithubImport::RateLimitError)
+ context 'when running in parallel mode' do
+ it 'raises RateLimitError' do
+ expect { client.raise_or_wait_for_rate_limit }
+ .to raise_error(Gitlab::GithubImport::RateLimitError)
+ end
end
- it 'sleeps when running in sequential mode' do
- client = described_class.new('foo', parallel: false)
-
- expect(client).to receive(:rate_limit_resets_in).and_return(1)
- expect(client).to receive(:sleep).with(1)
+ context 'when running in sequential mode' do
+ let(:parallel) { false }
- client.raise_or_wait_for_rate_limit
- end
+ it 'sleeps' do
+ expect(client).to receive(:rate_limit_resets_in).and_return(1)
+ expect(client).to receive(:sleep).with(1)
- it 'increments the rate limit counter' do
- client = described_class.new('foo', parallel: false)
+ client.raise_or_wait_for_rate_limit
+ end
- expect(client)
- .to receive(:rate_limit_resets_in)
- .and_return(1)
+ it 'increments the rate limit counter' do
+ expect(client)
+ .to receive(:rate_limit_resets_in)
+ .and_return(1)
- expect(client)
- .to receive(:sleep)
- .with(1)
+ expect(client)
+ .to receive(:sleep)
+ .with(1)
- expect(client.rate_limit_counter)
- .to receive(:increment)
- .and_call_original
+ expect(client.rate_limit_counter)
+ .to receive(:increment)
+ .and_call_original
- client.raise_or_wait_for_rate_limit
+ client.raise_or_wait_for_rate_limit
+ end
end
end
describe '#remaining_requests' do
it 'returns the number of remaining requests' do
- client = described_class.new('foo')
rate_limit = double(remaining: 1)
expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
@@ -427,7 +408,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#requests_limit' do
it 'returns requests limit' do
- client = described_class.new('foo')
rate_limit = double(limit: 1)
expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
@@ -437,7 +417,6 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#rate_limit_resets_in' do
it 'returns the number of seconds after which the rate limit is reset' do
- client = described_class.new('foo')
rate_limit = double(resets_in: 1)
expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
@@ -447,8 +426,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#api_endpoint' do
- let(:client) { described_class.new('foo') }
-
context 'without a custom endpoint configured in Omniauth' do
it 'returns the default API endpoint' do
expect(client)
@@ -473,8 +450,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#web_endpoint' do
- let(:client) { described_class.new('foo') }
-
context 'without a custom endpoint configured in Omniauth' do
it 'returns the default web endpoint' do
expect(client)
@@ -499,8 +474,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#custom_api_endpoint' do
- let(:client) { described_class.new('foo') }
-
context 'without a custom endpoint' do
it 'returns nil' do
expect(client)
@@ -533,8 +506,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#verify_ssl' do
- let(:client) { described_class.new('foo') }
-
context 'without a custom configuration' do
it 'returns true' do
expect(client)
@@ -553,8 +524,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#github_omniauth_provider' do
- let(:client) { described_class.new('foo') }
-
context 'without a configured provider' do
it 'returns an empty Hash' do
expect(Gitlab.config.omniauth)
@@ -576,8 +545,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe '#rate_limiting_enabled?' do
- let(:client) { described_class.new('foo') }
-
it 'returns true when using GitHub.com' do
expect(client.rate_limiting_enabled?).to eq(true)
end
@@ -592,7 +559,6 @@ RSpec.describe Gitlab::GithubImport::Client do
end
describe 'search' do
- let(:client) { described_class.new('foo') }
let(:user) { { login: 'user' } }
let(:org1) { { login: 'org1' } }
let(:org2) { { login: 'org2' } }
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
index 4476b4123ee..6a409762599 100644
--- a/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
@@ -10,7 +10,9 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:issuable) { create(:issue, project: project) }
- let!(:label) { create(:label, project: project) }
+ let(:label) { create(:label, project: project) }
+ let(:label_title) { label.title }
+ let(:label_id) { label.id }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
@@ -18,7 +20,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
'actor' => { 'id' => user.id, 'login' => user.username },
'event' => event_type,
'commit_id' => nil,
- 'label_title' => label.title,
+ 'label_title' => label_title,
'created_at' => '2022-04-26 18:30:53 UTC',
'issue' => { 'number' => issuable.iid, pull_request: issuable.is_a?(MergeRequest) }
)
@@ -27,7 +29,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
let(:event_attrs) do
{
user_id: user.id,
- label_id: label.id,
+ label_id: label_id,
created_at: issue_event.created_at
}.stringify_keys
end
@@ -42,7 +44,6 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
end
before do
- allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
allow(finder).to receive(:database_id).and_return(issuable.id)
end
@@ -52,16 +53,35 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
end
context 'with Issue' do
- context 'when importing a labeled event' do
- let(:event_type) { 'labeled' }
- let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'add') }
+ context 'when importing an event with an associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
+ end
- it_behaves_like 'new event'
+ context 'when importing a labeled event' do
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'add') }
+
+ it_behaves_like 'new event'
+ end
+
+ context 'when importing an unlabeled event' do
+ let(:event_type) { 'unlabeled' }
+ let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'remove') }
+
+ it_behaves_like 'new event'
+ end
end
- context 'when importing an unlabeled event' do
- let(:event_type) { 'unlabeled' }
- let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'remove') }
+ context 'when importing an event without an associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(nil)
+ end
+
+ let(:label_title) { 'deleted_label' }
+ let(:label_id) { nil }
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(issue_id: issuable.id, action: 'add') }
it_behaves_like 'new event'
end
@@ -70,16 +90,35 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
context 'with MergeRequest' do
let(:issuable) { create(:merge_request, source_project: project, target_project: project) }
- context 'when importing a labeled event' do
- let(:event_type) { 'labeled' }
- let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'add') }
+ context 'when importing an event with an associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
+ end
- it_behaves_like 'new event'
+ context 'when importing a labeled event' do
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'add') }
+
+ it_behaves_like 'new event'
+ end
+
+ context 'when importing an unlabeled event' do
+ let(:event_type) { 'unlabeled' }
+ let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'remove') }
+
+ it_behaves_like 'new event'
+ end
end
- context 'when importing an unlabeled event' do
- let(:event_type) { 'unlabeled' }
- let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'remove') }
+ context 'when importing an event without an associated label' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(nil)
+ end
+
+ let(:label_title) { 'deleted_label' }
+ let(:label_id) { nil }
+ let(:event_type) { 'labeled' }
+ let(:expected_event_attrs) { event_attrs.merge(merge_request_id: issuable.id, action: 'add') }
it_behaves_like 'new event'
end
diff --git a/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
index 027b2ac422e..d6b7411e640 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
@@ -6,20 +6,23 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
subject(:importer) { described_class.new(github_protected_branch, project, client) }
let(:branch_name) { 'protection' }
- let(:allow_force_pushes_on_github) { true }
+ let(:allow_force_pushes_on_github) { false }
+ let(:require_code_owner_reviews_on_github) { false }
let(:required_conversation_resolution) { false }
let(:required_signatures) { false }
let(:required_pull_request_reviews) { false }
let(:expected_push_access_level) { Gitlab::Access::MAINTAINER }
let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
- let(:expected_allow_force_push) { true }
+ let(:expected_allow_force_push) { false }
+ let(:expected_code_owner_approval_required) { false }
let(:github_protected_branch) do
Gitlab::GithubImport::Representation::ProtectedBranch.new(
id: branch_name,
allow_force_pushes: allow_force_pushes_on_github,
required_conversation_resolution: required_conversation_resolution,
required_signatures: required_signatures,
- required_pull_request_reviews: required_pull_request_reviews
+ required_pull_request_reviews: required_pull_request_reviews,
+ require_code_owner_reviews: require_code_owner_reviews_on_github
)
end
@@ -35,7 +38,8 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
name: 'protection',
push_access_levels_attributes: [{ access_level: expected_push_access_level }],
merge_access_levels_attributes: [{ access_level: expected_merge_access_level }],
- allow_force_push: expected_allow_force_push
+ allow_force_push: expected_allow_force_push,
+ code_owner_approval_required: expected_code_owner_approval_required
}
end
@@ -70,41 +74,35 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
end
context 'when branch is protected on GitLab' do
- before do
- create(
- :protected_branch,
- project: project,
- name: 'protect*',
- allow_force_push: allow_force_pushes_on_gitlab
- )
+ using RSpec::Parameterized::TableSyntax
+
+ where(
+ :allow_force_pushes_on_github,
+ :allow_force_pushes_on_gitlab,
+ :expected_allow_force_push
+ ) do
+ true | true | true
+ true | false | false
+ false | true | false
+ false | false | false
end
- context 'when branch protection rule on Gitlab is stricter than on Github' do
- let(:allow_force_pushes_on_github) { true }
- let(:allow_force_pushes_on_gitlab) { false }
- let(:expected_allow_force_push) { false }
-
- it_behaves_like 'create branch protection by the strictest ruleset'
- end
-
- context 'when branch protection rule on Github is stricter than on Gitlab' do
- let(:allow_force_pushes_on_github) { false }
- let(:allow_force_pushes_on_gitlab) { true }
- let(:expected_allow_force_push) { false }
-
- it_behaves_like 'create branch protection by the strictest ruleset'
- end
-
- context 'when branch protection rules on Github and Gitlab are the same' do
- let(:allow_force_pushes_on_github) { true }
- let(:allow_force_pushes_on_gitlab) { true }
- let(:expected_allow_force_push) { true }
+ with_them do
+ before do
+ create(
+ :protected_branch,
+ project: project,
+ name: 'protect*',
+ allow_force_push: allow_force_pushes_on_gitlab
+ )
+ end
it_behaves_like 'create branch protection by the strictest ruleset'
end
end
context 'when branch is not protected on GitLab' do
+ let(:allow_force_pushes_on_github) { true }
let(:expected_allow_force_push) { true }
it_behaves_like 'create branch protection by the strictest ruleset'
@@ -115,6 +113,30 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
allow(project).to receive(:default_branch).and_return(branch_name)
end
+ context 'when "allow force pushes - everyone" rule is enabled' do
+ let(:allow_force_pushes_on_github) { true }
+
+ context 'when there is any default branch protection' do
+ before do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_FULL)
+ end
+
+ let(:expected_allow_force_push) { false }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when there is no default branch protection' do
+ before do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_NONE)
+ end
+
+ let(:expected_allow_force_push) { allow_force_pushes_on_github }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+
context 'when required_conversation_resolution rule is enabled' do
let(:required_conversation_resolution) { true }
@@ -241,7 +263,8 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
:protected_branch,
project: project,
name: 'protect*',
- allow_force_push: true
+ allow_force_push: true,
+ code_owner_approval_required: false
)
end
@@ -297,5 +320,67 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
it_behaves_like 'create branch protection by the strictest ruleset'
end
end
+
+ context 'when the code_owner_approval_required feature is available', if: Gitlab.ee? do
+ before do
+ stub_licensed_features(code_owner_approval_required: true)
+ end
+
+ context 'when branch is protected on GitLab' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(
+ :require_code_owner_reviews_on_github,
+ :require_code_owner_reviews_on_gitlab,
+ :expected_code_owner_approval_required
+ ) do
+ true | true | true
+ true | false | true
+ false | true | true
+ false | false | false
+ end
+
+ with_them do
+ before do
+ create(
+ :protected_branch,
+ project: project,
+ name: 'protect*',
+ allow_force_push: true,
+ code_owner_approval_required: require_code_owner_reviews_on_gitlab
+ )
+ end
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+
+ context 'when branch is not protected on GitLab' do
+ context 'when require_code_owner_reviews rule is enabled on GitHub' do
+ let(:require_code_owner_reviews_on_github) { true }
+ let(:expected_code_owner_approval_required) { true }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when require_code_owner_reviews rule is disabled on GitHub' do
+ let(:require_code_owner_reviews_on_github) { false }
+ let(:expected_code_owner_approval_required) { false }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+ end
+
+ context 'when the code_owner_approval_required feature is not available' do
+ before do
+ stub_licensed_features(code_owner_approval_required: false)
+ end
+
+ let(:require_code_owner_reviews_on_github) { true }
+ let(:expected_code_owner_approval_required) { false }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
index a0ced456391..8809d58a252 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
@@ -29,7 +29,10 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
required_signatures = Struct.new(:url, :enabled, keyword_init: true)
enforce_admins = Struct.new(:url, :enabled, keyword_init: true)
allow_option = Struct.new(:enabled, keyword_init: true)
- required_pull_request_reviews = Struct.new(:url, :dismissal_restrictions, keyword_init: true)
+ required_pull_request_reviews = Struct.new(
+ :url, :dismissal_restrictions, :require_code_owner_reviews,
+ keyword_init: true
+ )
response.new(
name: 'main',
url: 'https://example.com/branches/main/protection',
@@ -58,7 +61,8 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
),
required_pull_request_reviews: required_pull_request_reviews.new(
url: 'https://example.com/branches/main/protection/required_pull_request_reviews',
- dismissal_restrictions: {}
+ dismissal_restrictions: {},
+ require_code_owner_reviews: true
)
)
end
@@ -160,6 +164,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
let(:branch_struct) { Struct.new(:protection, :name, :url, keyword_init: true) }
let(:protection_struct) { Struct.new(:enabled, keyword_init: true) }
let(:protected_branch) { branch_struct.new(name: 'main', protection: protection_struct.new(enabled: true)) }
+ let(:second_protected_branch) { branch_struct.new(name: 'fix', protection: protection_struct.new(enabled: true)) }
let(:unprotected_branch) { branch_struct.new(name: 'staging', protection: protection_struct.new(enabled: false)) }
# when user has no admin rights on repo
let(:unknown_protection_branch) { branch_struct.new(name: 'development', protection: nil) }
@@ -168,9 +173,9 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
before do
allow(client).to receive(:branches).with(project.import_source)
- .and_return([protected_branch, unprotected_branch, unknown_protection_branch])
+ .and_return([protected_branch, second_protected_branch, unprotected_branch, unknown_protection_branch])
allow(client).to receive(:branch_protection)
- .with(project.import_source, protected_branch.name).once
+ .with(project.import_source, anything)
.and_return(github_protection_rule)
allow(Gitlab::GithubImport::ObjectCounter).to receive(:increment)
.with(project, :protected_branch, :fetched)
@@ -180,12 +185,13 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
subject.each_object_to_import do |object|
expect(object).to eq github_protection_rule
end
- expect(Gitlab::GithubImport::ObjectCounter).to have_received(:increment).once
+ expect(Gitlab::GithubImport::ObjectCounter).to have_received(:increment).twice
end
context 'when protected branch is already processed' do
it "doesn't process this branch" do
subject.mark_as_imported(protected_branch)
+ subject.mark_as_imported(second_protected_branch)
subject.each_object_to_import {}
expect(Gitlab::GithubImport::ObjectCounter).not_to have_received(:increment)
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index fb6024d0952..49794eceb5a 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -8,11 +8,48 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
let_it_be(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
- let(:client_double) { double(user: { id: 999, login: 'author', email: 'author@email.com' }) }
let(:submitted_at) { Time.new(2017, 1, 1, 12, 00).utc }
+ let(:client_double) do
+ instance_double(
+ 'Gitlab::GithubImport::Client',
+ user: { id: 999, login: 'author', email: 'author@email.com' }
+ )
+ end
subject { described_class.new(review, project, client_double) }
+ shared_examples 'imports a reviewer for the Merge Request' do
+ it 'creates reviewer for the Merge Request' do
+ expect { subject.execute }.to change(MergeRequestReviewer, :count).by(1)
+
+ expect(merge_request.reviewers).to contain_exactly(author)
+ end
+
+ context 'when reviewer already exists' do
+ before do
+ create(
+ :merge_request_reviewer,
+ reviewer: author, merge_request: merge_request, state: 'unreviewed'
+ )
+ end
+
+ it 'does not change Merge Request reviewers' do
+ expect { subject.execute }.not_to change(MergeRequestReviewer, :count)
+
+ expect(merge_request.reviewers).to contain_exactly(author)
+ end
+ end
+ end
+
+ shared_examples 'imports an approval for the Merge Request' do
+ it 'creates an approval for the Merge Request' do
+ expect { subject.execute }.to change(Approval, :count).by(1)
+
+ expect(merge_request.approved_by_users.reload).to include(author)
+ expect(merge_request.approvals.last.created_at).to eq(submitted_at)
+ end
+ end
+
context 'when the review author can be mapped to a gitlab user' do
let_it_be(:author) { create(:user, email: 'author@email.com') }
@@ -20,34 +57,38 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
context 'when the review is "APPROVED"' do
let(:review) { create_review(type: 'APPROVED', note: '') }
- it 'creates a note for the review and approves the Merge Request' do
- expect { subject.execute }
- .to change(Note, :count).by(1)
- .and change(Approval, :count).by(1)
+ it_behaves_like 'imports an approval for the Merge Request'
+ it_behaves_like 'imports a reviewer for the Merge Request'
+
+ it 'creates a note for the review' do
+ expect { subject.execute }.to change(Note, :count).by(1)
last_note = merge_request.notes.last
expect(last_note.note).to eq('approved this merge request')
expect(last_note.author).to eq(author)
expect(last_note.created_at).to eq(submitted_at)
expect(last_note.system_note_metadata.action).to eq('approved')
-
- expect(merge_request.approved_by_users.reload).to include(author)
- expect(merge_request.approvals.last.created_at).to eq(submitted_at)
end
- it 'does nothing if the user already approved the merge request' do
- create(:approval, merge_request: merge_request, user: author)
+ context 'when the user already approved the merge request' do
+ before do
+ create(:approval, merge_request: merge_request, user: author)
+ end
- expect { subject.execute }
- .to change(Note, :count).by(0)
- .and change(Approval, :count).by(0)
+ it 'does not import a second approval and note' do
+ expect { subject.execute }
+ .to change(Note, :count).by(0)
+ .and change(Approval, :count).by(0)
+ end
end
end
context 'when the review is "COMMENTED"' do
let(:review) { create_review(type: 'COMMENTED', note: '') }
- it 'creates a note for the review' do
+ it_behaves_like 'imports a reviewer for the Merge Request'
+
+ it 'does not create a note for the review' do
expect { subject.execute }.not_to change(Note, :count)
end
end
@@ -55,7 +96,9 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
context 'when the review is "CHANGES_REQUESTED"' do
let(:review) { create_review(type: 'CHANGES_REQUESTED', note: '') }
- it 'creates a note for the review' do
+ it_behaves_like 'imports a reviewer for the Merge Request'
+
+ it 'does not create a note for the review' do
expect { subject.execute }.not_to change(Note, :count)
end
end
@@ -65,10 +108,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
context 'when the review is "APPROVED"' do
let(:review) { create_review(type: 'APPROVED') }
+ it_behaves_like 'imports an approval for the Merge Request'
+ it_behaves_like 'imports a reviewer for the Merge Request'
+
it 'creates a note for the review' do
- expect { subject.execute }
- .to change(Note, :count).by(2)
- .and change(Approval, :count).by(1)
+ expect { subject.execute }.to change(Note, :count).by(2)
note = merge_request.notes.where(system: false).last
expect(note.note).to eq("**Review:** Approved\n\nnote")
@@ -80,9 +124,6 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
expect(system_note.author).to eq(author)
expect(system_note.created_at).to eq(submitted_at)
expect(system_note.system_note_metadata.action).to eq('approved')
-
- expect(merge_request.approved_by_users.reload).to include(author)
- expect(merge_request.approvals.last.created_at).to eq(submitted_at)
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_request_importer_spec.rb
new file mode 100644
index 00000000000..6dcbc4e32e6
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_request_importer_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter, :clean_gitlab_redis_cache do
+ subject(:importer) { described_class.new(review_request, project, client) }
+
+ let(:project) { instance_double('Project') }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:merge_request) { create(:merge_request) }
+ let(:reviewer) { create(:user, username: 'alice') }
+ let(:review_request) do
+ Gitlab::GithubImport::Representation::PullRequests::ReviewRequests.from_json_hash(
+ merge_request_id: merge_request.id,
+ users: [
+ { 'id' => 1, 'login' => reviewer.username },
+ { 'id' => 2, 'login' => 'foo' }
+ ]
+ )
+ end
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(1, reviewer.username).and_return(reviewer.id)
+ allow(finder).to receive(:find).with(2, 'foo').and_return(nil)
+ end
+ end
+
+ it 'imports merge request reviewers that were found' do
+ importer.execute
+
+ expect(merge_request.reviewers.size).to eq 1
+ expect(merge_request.reviewers[0].id).to eq reviewer.id
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
new file mode 100644
index 00000000000..6c7fc4d5b15
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter, :clean_gitlab_redis_cache do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, import_source: 'foo') }
+
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+ let(:review_request_struct) { Struct.new(:merge_request_id, :users, keyword_init: true) }
+ let(:user_struct) { Struct.new(:id, :login, keyword_init: true) }
+
+ shared_context 'when project with merge requests' do
+ let_it_be(:merge_request_1) { create(:merge_request, source_project: project, target_branch: 'feature1') }
+ let_it_be(:merge_request_2) { create(:merge_request, source_project: project, target_branch: 'feature2') }
+
+ let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
+ let(:importer_attrs) do
+ [instance_of(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests), project, client]
+ end
+
+ let(:review_requests_1) do
+ {
+ users: [
+ { id: 4, login: 'alice' },
+ { id: 5, login: 'bob' }
+ ]
+ }
+ end
+
+ let(:review_requests_2) do
+ {
+ users: [{ id: 4, login: 'alice' }]
+ }
+ end
+
+ before do
+ allow(client).to receive(:pull_request_review_requests)
+ .with(project.import_source, merge_request_1.iid).and_return(review_requests_1)
+ allow(client).to receive(:pull_request_review_requests)
+ .with(project.import_source, merge_request_2.iid).and_return(review_requests_2)
+ end
+ end
+
+ describe '#sequential_import' do
+ include_context 'when project with merge requests'
+
+ it 'imports reviewers for each project merge request' do
+ expect_next_instances_of(
+ Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter, 2, false, *importer_attrs
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+
+ context 'when merge request is already processed' do
+ before do
+ Gitlab::Cache::Import::Caching.set_add(
+ "github-importer/pull_requests/pull_request_review_requests/already-imported/#{project.id}",
+ merge_request_1.iid
+ )
+ end
+
+ it "doesn't import this merge request reviewers" do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter, *importer_attrs
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+ end
+ end
+
+ describe '#parallel_import' do
+ include_context 'when project with merge requests'
+
+ let(:expected_worker_payload) do
+ [
+ [
+ project.id,
+ {
+ merge_request_id: merge_request_1.id,
+ users: [
+ { id: 4, login: 'alice' },
+ { id: 5, login: 'bob' }
+ ]
+ },
+ instance_of(String)
+ ],
+ [
+ project.id,
+ {
+ merge_request_id: merge_request_2.id,
+ users: [
+ { id: 4, login: 'alice' }
+ ]
+ },
+ instance_of(String)
+ ]
+ ]
+ end
+
+ it 'schedules reviewer imports for each merge request' do
+ expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
+ .to receive(:bulk_perform_in).with(
+ 1.second,
+ expected_worker_payload,
+ batch_size: 1000,
+ batch_delay: 1.minute
+ )
+
+ importer.parallel_import
+ end
+
+ context 'when merge request is already processed' do
+ before do
+ Gitlab::Cache::Import::Caching.set_add(
+ "github-importer/pull_requests/pull_request_review_requests/already-imported/#{project.id}",
+ merge_request_1.iid
+ )
+ end
+
+ it "doesn't schedule import this merge request reviewers" do
+ expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
+ .to receive(:bulk_perform_in).with(
+ 1.second,
+ expected_worker_payload.slice(1, 1),
+ batch_size: 1000,
+ batch_delay: 1.minute
+ )
+
+ importer.parallel_import
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
index 30b29659eee..60cae79459e 100644
--- a/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
@@ -24,6 +24,10 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
it 'includes the protected branch required_pull_request_reviews' do
expect(protected_branch.required_pull_request_reviews).to eq true
end
+
+ it 'includes the protected branch require_code_owner_reviews' do
+ expect(protected_branch.require_code_owner_reviews).to eq true
+ end
end
end
@@ -35,7 +39,10 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
keyword_init: true
)
enabled_setting = Struct.new(:enabled, keyword_init: true)
- required_pull_request_reviews = Struct.new(:url, :dismissal_restrictions, keyword_init: true)
+ required_pull_request_reviews = Struct.new(
+ :url, :dismissal_restrictions, :require_code_owner_reviews,
+ keyword_init: true
+ )
response.new(
url: 'https://example.com/branches/main/protection',
allow_force_pushes: enabled_setting.new(
@@ -49,7 +56,8 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
),
required_pull_request_reviews: required_pull_request_reviews.new(
url: 'https://example.com/branches/main/protection/required_pull_request_reviews',
- dismissal_restrictions: {}
+ dismissal_restrictions: {},
+ require_code_owner_reviews: true
)
)
end
@@ -67,7 +75,8 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
'allow_force_pushes' => true,
'required_conversation_resolution' => true,
'required_signatures' => true,
- 'required_pull_request_reviews' => true
+ 'required_pull_request_reviews' => true,
+ 'require_code_owner_reviews' => true
}
end
diff --git a/spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb b/spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb
new file mode 100644
index 00000000000..0393f692a69
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::PullRequests::ReviewRequests do
+ shared_examples 'Review requests' do
+ it 'returns an instance of Review Request' do
+ expect(review_requests).to be_an_instance_of(described_class)
+ end
+
+ context 'for returned Review Requests' do
+ it 'includes merge request id' do
+ expect(review_requests.merge_request_id).to eq(merge_request_id)
+ end
+
+ it 'includes reviewers' do
+ expect(review_requests.users.size).to eq 2
+
+ user = review_requests.users[0]
+ expect(user).to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+ expect(user.id).to eq(4)
+ expect(user.login).to eq('alice')
+ end
+ end
+ end
+
+ let(:merge_request_id) { 6501124486 }
+ let(:response) do
+ {
+ 'merge_request_id' => merge_request_id,
+ 'users' => [
+ { 'id' => 4, 'login' => 'alice' },
+ { 'id' => 5, 'login' => 'bob' }
+ ]
+ }
+ end
+
+ describe '.from_api_response' do
+ it_behaves_like 'Review requests' do
+ let(:review_requests) { described_class.from_api_response(response) }
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'Review requests' do
+ let(:review_requests) { described_class.from_json_hash(response) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index dd4dcca809b..5a1fcc5e2dc 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -39,6 +39,58 @@ RSpec.describe Gitlab::GonHelper do
helper.add_gon_variables
end
end
+
+ describe 'sentry configuration' do
+ let(:clientside_dsn) { 'https://xxx@sentry.example.com/1' }
+ let(:environment) { 'staging' }
+
+ describe 'sentry integration' do
+ before do
+ stub_config(sentry: { enabled: true, clientside_dsn: clientside_dsn, environment: environment })
+ end
+
+ it 'sets sentry dsn and environment from config' do
+ expect(gon).to receive(:sentry_dsn=).with(clientside_dsn)
+ expect(gon).to receive(:sentry_environment=).with(environment)
+
+ helper.add_gon_variables
+ end
+ end
+
+ describe 'new sentry integration' do
+ before do
+ stub_application_setting(sentry_enabled: true)
+ stub_application_setting(sentry_clientside_dsn: clientside_dsn)
+ stub_application_setting(sentry_environment: environment)
+ end
+
+ context 'when enable_new_sentry_clientside_integration is disabled' do
+ before do
+ stub_feature_flags(enable_new_sentry_clientside_integration: false)
+ end
+
+ it 'does not set sentry dsn and environment from config' do
+ expect(gon).not_to receive(:sentry_dsn=).with(clientside_dsn)
+ expect(gon).not_to receive(:sentry_environment=).with(environment)
+
+ helper.add_gon_variables
+ end
+ end
+
+ context 'when enable_new_sentry_clientside_integration is enabled' do
+ before do
+ stub_feature_flags(enable_new_sentry_clientside_integration: true)
+ end
+
+ it 'sets sentry dsn and environment from config' do
+ expect(gon).to receive(:sentry_dsn=).with(clientside_dsn)
+ expect(gon).to receive(:sentry_environment=).with(environment)
+
+ helper.add_gon_variables
+ end
+ end
+ end
+ end
end
describe '#push_frontend_feature_flag' do
diff --git a/spec/lib/gitlab/grape_logging/loggers/filter_parameters_spec.rb b/spec/lib/gitlab/grape_logging/loggers/filter_parameters_spec.rb
new file mode 100644
index 00000000000..15c842c9f44
--- /dev/null
+++ b/spec/lib/gitlab/grape_logging/loggers/filter_parameters_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GrapeLogging::Loggers::FilterParameters do
+ subject { described_class.new }
+
+ describe ".parameters" do
+ let(:route) { instance_double('Grape::Router::Route', settings: settings) }
+ let(:endpoint) { instance_double('Grape::Endpoint', route: route) }
+
+ let(:env) do
+ { 'rack.input' => '', Grape::Env::API_ENDPOINT => endpoint }
+ end
+
+ let(:mock_request) { ActionDispatch::Request.new(env) }
+
+ before do
+ mock_request.params['key'] = 'some key'
+ mock_request.params['foo'] = 'wibble'
+ mock_request.params['value'] = 'some value'
+ mock_request.params['oof'] = 'wobble'
+ mock_request.params['other'] = 'Unaffected'
+ end
+
+ context 'when the log_safety setting is provided' do
+ let(:settings) { { log_safety: { safe: %w[foo bar key], unsafe: %w[oof rab value] } } }
+
+ it 'includes safe parameters, and filters unsafe ones' do
+ data = subject.parameters(mock_request, nil)
+
+ expect(data).to eq(
+ params: {
+ 'key' => 'some key',
+ 'foo' => 'wibble',
+ 'value' => '[FILTERED]',
+ 'oof' => '[FILTERED]',
+ 'other' => 'Unaffected'
+ }
+ )
+ end
+ end
+
+ context 'when the log_safety setting is not provided' do
+ let(:settings) { {} }
+
+ it 'behaves like the normal parameter filter' do
+ data = subject.parameters(mock_request, nil)
+
+ expect(data).to eq(
+ params: {
+ 'key' => '[FILTERED]',
+ 'foo' => 'wibble',
+ 'value' => 'some value',
+ 'oof' => 'wobble',
+ 'other' => 'Unaffected'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/health_checks/gitaly_check_spec.rb b/spec/lib/gitlab/health_checks/gitaly_check_spec.rb
index 000b8eff661..948452c0b58 100644
--- a/spec/lib/gitlab/health_checks/gitaly_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/gitaly_check_spec.rb
@@ -40,9 +40,9 @@ RSpec.describe Gitlab::HealthChecks::GitalyCheck do
end
let(:healthy_check) { double(check: { success: true }) }
- let(:ready_check) { double(readiness_check: { success: false, message: 'Clock is out of sync' }) }
+ let(:ready_check) { double(readiness_check: { success: false, message: 'A readiness check has failed' }) }
- it { is_expected.to match_array([result_class.new('gitaly_check', false, 'Clock is out of sync', shard: 'default')]) }
+ it { is_expected.to match_array([result_class.new('gitaly_check', false, 'A readiness check has failed', shard: 'default')]) }
end
end
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
index cb8fef60ab2..f9a6c25b786 100644
--- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -78,6 +78,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
state
blocking_discussions_resolved
first_contribution
+ detailed_merge_status
].freeze
expect(data).to include(*expected_additional_attributes)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index ccc4f1f7149..e9dde1c6180 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -52,6 +52,7 @@ issues:
- user_mentions
- system_note_metadata
- alert_management_alert
+- alert_management_alerts
- status_page_published_incident
- namespace
- note_authors
@@ -361,10 +362,12 @@ hooks:
- web_hook_logs
protected_branches:
- project
+- group
- merge_access_levels
- push_access_levels
- unprotect_access_levels
- approval_project_rules
+- external_status_checks
- required_code_owners_sections
protected_tags:
- project
@@ -538,6 +541,7 @@ project:
- jenkins_integration
- index_status
- feature_usage
+- regular_or_any_approver_approval_rules
- approval_rules
- approval_merge_request_rules
- approval_merge_request_rule_sources
@@ -548,6 +552,7 @@ project:
- path_locks
- approver_groups
- repository_state
+- wiki_repository
- wiki_repository_state
- source_pipelines
- sourced_pipelines
@@ -643,6 +648,7 @@ project:
- build_artifacts_size_refresh
- project_callouts
- pipeline_metadata
+- disable_download_button
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
index 9af72cc0dea..a6cb74c3c9f 100644
--- a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
+++ b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
context 'when archive path is not a string' do
let(:filepath) { 123 }
- let(:error_message) { 'Archive path is not a string' }
+ let(:error_message) { 'Invalid path' }
it 'returns false' do
expect(subject.valid?).to eq(false)
diff --git a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
index 346f653acd4..5ef9eb78d3b 100644
--- a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::ImportExport::DesignRepoRestorer do
- include GitHelpers
-
describe 'bundle a design Git repo' do
let(:user) { create(:user) }
let!(:project_with_design_repo) { create(:project, :design_repo) }
@@ -29,10 +27,8 @@ RSpec.describe Gitlab::ImportExport::DesignRepoRestorer do
after do
FileUtils.rm_rf(export_path)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.rm_rf(project_with_design_repo.design_repository.path_to_repo)
- FileUtils.rm_rf(project.design_repository.path_to_repo)
- end
+ project_with_design_repo.design_repository.remove
+ project.design_repository.remove
end
it 'restores the repo successfully' do
diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb
index 25c82588c13..9d766eb3af1 100644
--- a/spec/lib/gitlab/import_export/fork_spec.rb
+++ b/spec/lib/gitlab/import_export/fork_spec.rb
@@ -47,10 +47,8 @@ RSpec.describe 'forked project import' do
after do
FileUtils.rm_rf(export_path)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.rm_rf(project_with_repo.repository.path_to_repo)
- FileUtils.rm_rf(project.repository.path_to_repo)
- end
+ project_with_repo.repository.remove
+ project.repository.remove
end
it 'can access the MR', :sidekiq_might_not_need_inline do
diff --git a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
index 85d07e3fe63..79ab1913e7e 100644
--- a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::ImportExport::Group::TreeSaver do
members
milestones
].each do |association|
- path = exported_path_for("#{g.id}", "#{association}.ndjson")
+ path = exported_path_for(g.id.to_s, "#{association}.ndjson")
expect(File.exist?(path)).to eq(true), "#{path} does not exist"
end
end
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 51c0008b2b4..b1f5574fba1 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -96,11 +96,11 @@ RSpec.describe 'Test coverage of the Project Import' do
case item
when Hash
item.each do |k, v|
- if (v.is_a?(Array) || v.is_a?(Hash)) && v.present?
- new_path = path + [k]
- res << new_path
- gather_relations(v, res, new_path)
- end
+ next unless (v.is_a?(Array) || v.is_a?(Hash)) && v.present?
+
+ new_path = path + [k]
+ res << new_path
+ gather_relations(v, res, new_path)
end
when Array
item.each { |i| gather_relations(i, res, path) }
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index 550cefea805..3ca9f727033 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -23,9 +23,7 @@ RSpec.describe Gitlab::ImportExport::MergeRequestParser do
end
after do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.rm_rf(project.repository.path_to_repo)
- end
+ project.repository.remove
end
it 'has a source branch' do
diff --git a/spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb b/spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb
new file mode 100644
index 00000000000..a781139acab
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::Project::ExportedRelationsMerger do
+ let(:export_job) { create(:project_export_job) }
+
+ let(:shared) { Gitlab::ImportExport::Shared.new(export_job.project) }
+
+ before do
+ create(:project_relation_export_upload,
+ relation_export: create(:project_relation_export, relation: 'project', project_export_job: export_job),
+ export_file: fixture_file_upload("spec/fixtures/gitlab/import_export/project.tar.gz")
+ )
+
+ create(:project_relation_export_upload,
+ relation_export: create(:project_relation_export, relation: 'labels', project_export_job: export_job),
+ export_file: fixture_file_upload("spec/fixtures/gitlab/import_export/labels.tar.gz")
+ )
+
+ create(:project_relation_export_upload,
+ relation_export: create(:project_relation_export, relation: 'uploads', project_export_job: export_job),
+ export_file: fixture_file_upload("spec/fixtures/gitlab/import_export/uploads.tar.gz")
+ )
+ end
+
+ describe '#save' do
+ subject(:service) { described_class.new(export_job: export_job, shared: shared) }
+
+ it 'downloads, extracts, and merges all files into export_path' do
+ Dir.mktmpdir do |dirpath|
+ allow(shared).to receive(:export_path).and_return(dirpath)
+
+ result = service.save
+
+ expect(result).to eq(true)
+ expect(Dir.glob("#{dirpath}/**/*")).to match_array(
+ [
+ "#{dirpath}/project",
+ "#{dirpath}/project/project.json",
+ "#{dirpath}/project/labels.ndjson",
+ "#{dirpath}/uploads",
+ "#{dirpath}/uploads/70edb596c34ad7795baa6a0f0aa03d44",
+ "#{dirpath}/uploads/70edb596c34ad7795baa6a0f0aa03d44/file1.txt",
+ "#{dirpath}/uploads/c8c93c6f546b002cbce4cb8d05d0dfb8",
+ "#{dirpath}/uploads/c8c93c6f546b002cbce4cb8d05d0dfb8/file2.txt"
+ ]
+ )
+ end
+ end
+
+ context 'when exception occurs' do
+ before do
+ create(:project_relation_export, relation: 'releases', project_export_job: export_job)
+ create(:project_relation_export, relation: 'issues', project_export_job: export_job)
+ end
+
+ it 'registers the exception messages and returns false' do
+ Dir.mktmpdir do |dirpath|
+ allow(shared).to receive(:export_path).and_return(dirpath)
+
+ result = service.save
+
+ expect(result).to eq(false)
+ expect(shared.errors).to match_array(
+ [
+ "undefined method `export_file' for nil:NilClass",
+ "undefined method `export_file' for nil:NilClass"
+ ]
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/relation_saver_spec.rb b/spec/lib/gitlab/import_export/project/relation_saver_spec.rb
index dec51b3afd1..0467b63e918 100644
--- a/spec/lib/gitlab/import_export/project/relation_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_saver_spec.rb
@@ -28,14 +28,14 @@ RSpec.describe Gitlab::ImportExport::Project::RelationSaver do
it 'serializes the root node as a json file in the export path' do
relation_saver.save # rubocop:disable Rails/SaveBang
- json = read_json(File.join(shared.export_path, 'project.json'))
+ json = read_json(File.join(shared.export_path, 'tree', 'project.json'))
expect(json).to include({ 'description' => 'Project description' })
end
it 'serializes only allowed attributes' do
relation_saver.save # rubocop:disable Rails/SaveBang
- json = read_json(File.join(shared.export_path, 'project.json'))
+ json = read_json(File.join(shared.export_path, 'tree', 'project.json'))
expect(json).to include({ 'description' => 'Project description' })
expect(json.keys).not_to include('name')
end
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationSaver do
it 'serializes the child node as a ndjson file in the export path inside the project folder' do
relation_saver.save # rubocop:disable Rails/SaveBang
- ndjson = read_ndjson(File.join(shared.export_path, 'project', "#{relation}.ndjson"))
+ ndjson = read_ndjson(File.join(shared.export_path, 'tree', 'project', "#{relation}.ndjson"))
expect(ndjson.first).to include({ 'title' => 'Label 1' })
expect(ndjson.second).to include({ 'title' => 'Label 2' })
end
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationSaver do
it 'serializes only allowed attributes' do
relation_saver.save # rubocop:disable Rails/SaveBang
- ndjson = read_ndjson(File.join(shared.export_path, 'project', "#{relation}.ndjson"))
+ ndjson = read_ndjson(File.join(shared.export_path, 'tree', 'project', "#{relation}.ndjson"))
expect(ndjson.first.keys).not_to include('description_html')
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index fae94a3b544..b753746cd8c 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
pipeline = Ci::Pipeline.find_by_sha('sha-notes')
pipeline_metadata = pipeline.pipeline_metadata
- expect(pipeline_metadata.title).to eq('Build pipeline')
+ expect(pipeline_metadata.name).to eq('Build pipeline')
expect(pipeline_metadata.pipeline_id).to eq(pipeline.id)
expect(pipeline_metadata.project_id).to eq(pipeline.project_id)
end
diff --git a/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb b/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
new file mode 100644
index 00000000000..6e5be0b2829
--- /dev/null
+++ b/spec/lib/gitlab/import_export/recursive_merge_folders_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::RecursiveMergeFolders do
+ describe '.merge' do
+ it 'merges folders and ignores symlinks' do
+ Dir.mktmpdir do |tmpdir|
+ source = "#{tmpdir}/source"
+ FileUtils.mkdir_p("#{source}/folder/folder")
+ FileUtils.touch("#{source}/file1.txt")
+ FileUtils.touch("#{source}/folder/file2.txt")
+ FileUtils.touch("#{source}/folder/folder/file3.txt")
+ FileUtils.ln_s("#{source}/file1.txt", "#{source}/symlink-file1.txt")
+ FileUtils.ln_s("#{source}/folder", "#{source}/symlink-folder")
+
+ target = "#{tmpdir}/target"
+ FileUtils.mkdir_p("#{target}/folder/folder")
+ FileUtils.mkdir_p("#{target}/folderA")
+ FileUtils.touch("#{target}/fileA.txt")
+
+ described_class.merge(source, target)
+
+ expect(Dir.children("#{tmpdir}/target")).to match_array(%w[folder file1.txt folderA fileA.txt])
+ expect(Dir.children("#{tmpdir}/target/folder")).to match_array(%w[folder file2.txt])
+ expect(Dir.children("#{tmpdir}/target/folder/folder")).to match_array(%w[file3.txt])
+ end
+ end
+
+ it 'raises an error for invalid source path' do
+ Dir.mktmpdir do |tmpdir|
+ expect do
+ described_class.merge("#{tmpdir}/../", tmpdir)
+ end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end
+ end
+
+ it 'raises an error for source path outside temp dir' do
+ Dir.mktmpdir do |tmpdir|
+ expect do
+ described_class.merge('/', tmpdir)
+ end.to raise_error(StandardError, 'path / is not allowed')
+ end
+ end
+
+ it 'raises an error for invalid target path' do
+ Dir.mktmpdir do |tmpdir|
+ expect do
+ described_class.merge(tmpdir, "#{tmpdir}/../")
+ end.to raise_error(Gitlab::Utils::PathTraversalAttackError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
index c0215ff5843..727ca4f630b 100644
--- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::ImportExport::RepoRestorer do
- include GitHelpers
-
let_it_be(:project_with_repo) do
create(:project, :repository, :wiki_repo, name: 'test-repo-restorer', path: 'test-repo-restorer').tap do |p|
p.wiki.create_page('page', 'foobar', :markdown, 'created page')
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 23eb93a1bce..75d980cd5f4 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -336,7 +336,7 @@ Ci::PipelineMetadata:
- id
- project_id
- pipeline_id
-- title
+- name
Ci::Stage:
- id
- name
@@ -568,6 +568,7 @@ Project:
- suggestion_commit_message
- merge_commit_template
- squash_commit_template
+- issue_branch_template
Author:
- name
ProjectFeature:
@@ -592,6 +593,7 @@ ProjectFeature:
- feature_flags_access_level
- releases_access_level
- monitor_access_level
+- infrastructure_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
diff --git a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
index e529d36fd11..ebb0d62afa0 100644
--- a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::ImportExport::SnippetsRepoRestorer do
- include GitHelpers
-
describe 'bundle a snippet Git repo' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb b/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
index 7afb80488d8..cb4fdeed1a1 100644
--- a/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
+++ b/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker, :freeze_time do
end
it 'returns the list of projects for which deletion warning email has been sent' do
- expected_hash = { "project:1" => "#{Date.current}" }
+ expected_hash = { "project:1" => Date.current.to_s }
expect(Gitlab::InactiveProjectsDeletionWarningTracker.notified_projects).to eq(expected_hash)
end
@@ -57,7 +57,7 @@ RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker, :freeze_time do
end
it 'returns the date if a deletion warning email has been sent for a given project' do
- expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).notification_date).to eq("#{Date.current}")
+ expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).notification_date).to eq(Date.current.to_s)
end
it 'returns nil if a deletion warning email has not been sent for a given project' do
diff --git a/spec/lib/gitlab/incoming_email_spec.rb b/spec/lib/gitlab/incoming_email_spec.rb
index 1545de6d8fd..acd6634058f 100644
--- a/spec/lib/gitlab/incoming_email_spec.rb
+++ b/spec/lib/gitlab/incoming_email_spec.rb
@@ -1,87 +1,17 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::IncomingEmail do
- describe "self.enabled?" do
- context "when reply by email is enabled" do
- before do
- stub_incoming_email_setting(enabled: true)
- end
-
- it 'returns true' do
- expect(described_class.enabled?).to be(true)
- end
- end
-
- context "when reply by email is disabled" do
- before do
- stub_incoming_email_setting(enabled: false)
- end
+ let(:setting_name) { :incoming_email }
- it "returns false" do
- expect(described_class.enabled?).to be(false)
- end
- end
- end
+ it_behaves_like 'common email methods'
- describe 'self.supports_wildcard?' do
- context 'address contains the wildcard placeholder' do
- before do
- stub_incoming_email_setting(address: 'replies+%{key}@example.com')
- end
-
- it 'confirms that wildcard is supported' do
- expect(described_class.supports_wildcard?).to be(true)
- end
- end
-
- context "address doesn't contain the wildcard placeholder" do
- before do
- stub_incoming_email_setting(address: 'replies@example.com')
- end
-
- it 'returns that wildcard is not supported' do
- expect(described_class.supports_wildcard?).to be(false)
- end
- end
-
- context 'address is not set' do
- before do
- stub_incoming_email_setting(address: nil)
- end
-
- it 'returns that wildcard is not supported' do
- expect(described_class.supports_wildcard?).to be(false)
- end
- end
- end
-
- context 'self.unsubscribe_address' do
+ describe 'self.key_from_address' do
before do
stub_incoming_email_setting(address: 'replies+%{key}@example.com')
end
- it 'returns the address with interpolated reply key and unsubscribe suffix' do
- expect(described_class.unsubscribe_address('key')).to eq("replies+key#{Gitlab::IncomingEmail::UNSUBSCRIBE_SUFFIX}@example.com")
- end
- end
-
- context "self.reply_address" do
- before do
- stub_incoming_email_setting(address: "replies+%{key}@example.com")
- end
-
- it "returns the address with an interpolated reply key" do
- expect(described_class.reply_address("key")).to eq("replies+key@example.com")
- end
- end
-
- context "self.key_from_address" do
- before do
- stub_incoming_email_setting(address: "replies+%{key}@example.com")
- end
-
it "returns reply key" do
expect(described_class.key_from_address("replies+key@example.com")).to eq("key")
end
@@ -101,25 +31,4 @@ RSpec.describe Gitlab::IncomingEmail do
end
end
end
-
- context 'self.key_from_fallback_message_id' do
- it 'returns reply key' do
- expect(described_class.key_from_fallback_message_id('reply-key@localhost')).to eq('key')
- end
- end
-
- context 'self.scan_fallback_references' do
- let(:references) do
- '<issue_1@localhost>' \
- ' <reply-59d8df8370b7e95c5a49fbf86aeb2c93@localhost>' \
- ',<exchange@microsoft.com>'
- end
-
- it 'returns reply key' do
- expect(described_class.scan_fallback_references(references))
- .to eq(%w[issue_1@localhost
- reply-59d8df8370b7e95c5a49fbf86aeb2c93@localhost
- exchange@microsoft.com])
- end
- end
end
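The rewritten spec above folds the per-method examples into a shared example group keyed by `setting_name`. A minimal sketch of that RSpec pattern follows; the shared example body here is a placeholder invented for illustration, not the actual 'common email methods' group from the GitLab suite.

    # Illustrative shared-example pattern; the group body is a stand-in.
    RSpec.shared_examples 'common email methods' do
      it 'responds to enabled?' do
        expect(described_class).to respond_to(:enabled?)
      end

      it 'responds to supports_wildcard?' do
        expect(described_class).to respond_to(:supports_wildcard?)
      end
    end

    RSpec.describe Gitlab::IncomingEmail do
      let(:setting_name) { :incoming_email }

      it_behaves_like 'common email methods'
    end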
diff --git a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
index e4af3f77d5d..58c75bff9dd 100644
--- a/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_cluster_validator_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
it do
stub_rails_env(env)
- args = [:mget, 'foo', 'bar']
+ args = [[:mget, 'foo', 'bar']]
if should_raise
expect { described_class.validate!(args) }
@@ -58,7 +58,7 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
with_them do
it do
- args = [command] + arguments
+ args = [[command] + arguments]
if should_raise
expect { described_class.validate!(args) }
@@ -68,13 +68,32 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
end
end
end
+
+ where(:arguments, :should_raise) do
+ [[:get, "foo"], [:get, "bar"]] | true
+ [[:get, "foo"], [:mget, "foo", "bar"]] | true # mix of single-key and multi-key cmds
+ [[:get, "{foo}:name"], [:get, "{foo}:profile"]] | false
+ [[:del, "foo"], [:del, "bar"]] | true
+ [] | false # pipeline or transaction opened and closed without ops
+ end
+
+ with_them do
+ it do
+ if should_raise
+ expect { described_class.validate!(arguments) }
+ .to raise_error(described_class::CrossSlotError)
+ else
+ expect { described_class.validate!(arguments) }.not_to raise_error
+ end
+ end
+ end
end
describe '.allow_cross_slot_commands' do
it 'does not raise for invalid arguments' do
expect do
described_class.allow_cross_slot_commands do
- described_class.validate!([:mget, 'foo', 'bar'])
+ described_class.validate!([[:mget, 'foo', 'bar']])
end
end.not_to raise_error
end
@@ -83,10 +102,10 @@ RSpec.describe Gitlab::Instrumentation::RedisClusterValidator do
expect do
described_class.allow_cross_slot_commands do
described_class.allow_cross_slot_commands do
- described_class.validate!([:mget, 'foo', 'bar'])
+ described_class.validate!([[:mget, 'foo', 'bar']])
end
- described_class.validate!([:mget, 'foo', 'bar'])
+ described_class.validate!([[:mget, 'foo', 'bar']])
end
end.not_to raise_error
end
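The hunks above change `validate!` to take an array of commands, each command itself an array, matching how a whole pipeline is inspected rather than a single flat command. A minimal sketch of the implied calling convention, assuming validation is enforced in the current environment:

    # Shapes inferred from the spec changes above; illustrative only.
    validator = Gitlab::Instrumentation::RedisClusterValidator

    # Keys sharing a hash tag resolve to the same slot, so this passes validation.
    validator.validate!([[:get, '{user:1}:name'], [:get, '{user:1}:email']])

    # Keys in different slots are rejected where validation is enforced.
    begin
      validator.validate!([[:get, 'foo'], [:get, 'bar']])
    rescue Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError
      # callers either allow-list the call or restructure their keys
    end

    # The pre-existing escape hatch wraps the new argument shape as well.
    validator.allow_cross_slot_commands do
      validator.validate!([[:mget, 'foo', 'bar']])
    end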
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index 5b5516f100b..02c5dfb7521 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -57,8 +57,8 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
Gitlab::Redis::SharedState.with do |redis|
redis.pipelined do |pipeline|
- pipeline.call(:get, 'foobar')
- pipeline.call(:get, 'foobarbaz')
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:get, '{foobar}baz')
end
end
end
@@ -103,11 +103,22 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
Gitlab::Redis::SharedState.with do |redis|
redis.pipelined do |pipeline|
- pipeline.call(:get, 'foobar')
- pipeline.call(:get, 'foobarbaz')
+ pipeline.call(:get, '{foobar}:buz')
+ pipeline.call(:get, '{foobar}baz')
end
end
end
+
+ it 'raises error when keys are not from the same slot' do
+ expect do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.pipelined do |pipeline|
+ pipeline.call(:get, 'foo')
+ pipeline.call(:get, 'bar')
+ end
+ end
+ end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
+ end
end
end
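The interceptor spec now keeps pipelined keys in one slot by using Redis hash tags, and adds a cross-slot failure case. A small redis-rb sketch of the hash-tag idea, independent of GitLab internals (the connection is a placeholder):

    require 'redis'

    redis = Redis.new # placeholder connection; any redis-rb client works

    # Only the substring inside {...} is hashed for cluster slot assignment,
    # so both keys land in the same slot and can be pipelined safely.
    redis.pipelined do |pipeline|
      pipeline.set('{session:42}:token', 'abc')
      pipeline.get('{session:42}:owner')
    end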
diff --git a/spec/lib/gitlab/json_logger_spec.rb b/spec/lib/gitlab/json_logger_spec.rb
index 23f7191454a..801de357ddc 100644
--- a/spec/lib/gitlab/json_logger_spec.rb
+++ b/spec/lib/gitlab/json_logger_spec.rb
@@ -7,6 +7,26 @@ RSpec.describe Gitlab::JsonLogger do
let(:now) { Time.now }
+ describe '#file_name' do
+ let(:subclass) do
+ Class.new(Gitlab::JsonLogger) do
+ def self.file_name_noext
+ 'testlogger'
+ end
+ end
+ end
+
+ it 'raises error when file_name_noext not implemented' do
+ expect { described_class.file_name }.to raise_error(
+ 'JsonLogger implementations must provide file_name_noext implementation'
+ )
+ end
+
+ it 'returns log file name when file_name_noext is implemented' do
+ expect(subclass.file_name).to eq('testlogger.log')
+ end
+ end
+
describe '#format_message' do
before do
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('new-correlation-id')
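The new examples above imply that `Gitlab::JsonLogger` subclasses must define `.file_name_noext`, and that `.file_name` appends `.log` to it. A hypothetical subclass following that contract (the class name is invented):

    # Hypothetical subclass; only the contract exercised by the spec is shown.
    class ImportJsonLogger < Gitlab::JsonLogger
      def self.file_name_noext
        'import_json' # .file_name should then return 'import_json.log'
      end
    end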
diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb
index 73276288765..cbfab7e8884 100644
--- a/spec/lib/gitlab/json_spec.rb
+++ b/spec/lib/gitlab/json_spec.rb
@@ -2,6 +2,10 @@
require "spec_helper"
+# We can disable the cop that enforces the use of this class
+# as we need to test around it.
+#
+# rubocop: disable Gitlab/Json
RSpec.describe Gitlab::Json do
before do
stub_feature_flags(json_wrapper_legacy_mode: true)
@@ -429,4 +433,56 @@ RSpec.describe Gitlab::Json do
end
end
end
+
+ describe Gitlab::Json::RailsEncoder do
+ let(:obj) do
+ { foo: "<span>bar</span>" }
+ end
+
+ it "is used by ActiveSupport::JSON" do
+ expect_next_instance_of(described_class) do |encoder|
+ expect(encoder).to receive(:encode).with(obj)
+ end
+
+ ActiveSupport::JSON.encode(obj)
+ end
+
+ it "is used by .to_json calls" do
+ expect_next_instance_of(described_class) do |encoder|
+ expect(encoder).to receive(:encode).with(obj)
+ end
+
+ obj.to_json
+ end
+
+ it "is consistent with the original JSON implementation" do
+ default_encoder = ActiveSupport::JSON::Encoding::JSONGemEncoder
+
+ original_result = ActiveSupport::JSON::Encoding.use_encoder(default_encoder) do
+ ActiveSupport::JSON.encode(obj)
+ end
+
+ new_result = ActiveSupport::JSON::Encoding.use_encoder(described_class) do
+ ActiveSupport::JSON.encode(obj)
+ end
+
+ expect(new_result).to eq(original_result)
+ end
+
+ it "behaves the same when processing invalid unicode data" do
+ invalid_obj = { test: "Gr\x80\x81e" }
+ default_encoder = ActiveSupport::JSON::Encoding::JSONGemEncoder
+
+ original_result = ActiveSupport::JSON::Encoding.use_encoder(default_encoder) do
+ expect { ActiveSupport::JSON.encode(invalid_obj) }.to raise_error(JSON::GeneratorError)
+ end
+
+ new_result = ActiveSupport::JSON::Encoding.use_encoder(described_class) do
+ expect { ActiveSupport::JSON.encode(invalid_obj) }.to raise_error(JSON::GeneratorError)
+ end
+
+ expect(new_result).to eq(original_result)
+ end
+ end
end
+# rubocop: enable Gitlab/Json
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index 0fbb5f31210..34eb48a3221 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -125,6 +125,18 @@ RSpec.describe Gitlab::Kas do
end
end
+ describe '.version_info' do
+ let(:version) { '15.6.0-rc1' }
+
+ before do
+ allow(described_class).to receive(:version).and_return(version)
+ end
+
+ it 'returns gitlab_kas version config, including suffix' do
+ expect(described_class.version_info.to_s).to eq(version)
+ end
+ end
+
describe '.ensure_secret!' do
context 'secret file exists' do
before do
diff --git a/spec/lib/gitlab/kroki_spec.rb b/spec/lib/gitlab/kroki_spec.rb
index 7d29d018ff1..3d6ecf20377 100644
--- a/spec/lib/gitlab/kroki_spec.rb
+++ b/spec/lib/gitlab/kroki_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe Gitlab::Kroki do
describe '.formats' do
def default_formats
- %w[bytefield c4plantuml ditaa erd graphviz nomnoml pikchr plantuml svgbob umlet vega vegalite wavedrom].freeze
+ %w[bytefield c4plantuml ditaa erd graphviz nomnoml pikchr plantuml
+ structurizr svgbob umlet vega vegalite wavedrom].freeze
end
subject { described_class.formats(Gitlab::CurrentSettings) }
diff --git a/spec/lib/gitlab/memory/watchdog/configuration_spec.rb b/spec/lib/gitlab/memory/watchdog/configuration_spec.rb
index 892a4b06ad0..38a39f6a33a 100644
--- a/spec/lib/gitlab/memory/watchdog/configuration_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog/configuration_spec.rb
@@ -78,36 +78,53 @@ RSpec.describe Gitlab::Memory::Watchdog::Configuration do
end
end
- context 'when two monitors are configured to be used' do
- before do
- configuration.monitors.use monitor_class_1, false, { message: 'monitor_1_text' }, max_strikes: 5
- configuration.monitors.use monitor_class_2, true, { message: 'monitor_2_text' }, max_strikes: 0
+ context 'when two different monitor classes are configured' do
+ shared_examples 'executes monitors and returns correct results' do
+ it 'calls each monitor and returns correct results', :aggregate_failures do
+ payloads = []
+ thresholds = []
+ strikes = []
+ monitor_names = []
+
+ configuration.monitors.call_each do |result|
+ payloads << result.payload
+ thresholds << result.threshold_violated?
+ strikes << result.strikes_exceeded?
+ monitor_names << result.monitor_name
+ end
+
+ expect(payloads).to eq([payload1, payload2])
+ expect(thresholds).to eq([false, true])
+ expect(strikes).to eq([false, true])
+ expect(monitor_names).to eq([:monitor1, :monitor2])
+ end
+ end
+
+ context 'when monitors are configured inline' do
+ before do
+ configuration.monitors.push monitor_class_1, false, { message: 'monitor_1_text' }, max_strikes: 5
+ configuration.monitors.push monitor_class_2, true, { message: 'monitor_2_text' }, max_strikes: 0
+ end
+
+ include_examples 'executes monitors and returns correct results'
end
- it 'calls each monitor and returns correct results', :aggregate_failures do
- payloads = []
- thresholds = []
- strikes = []
- monitor_names = []
-
- configuration.monitors.call_each do |result|
- payloads << result.payload
- thresholds << result.threshold_violated?
- strikes << result.strikes_exceeded?
- monitor_names << result.monitor_name
+ context 'when monitors are configured in a block' do
+ before do
+ configuration.monitors do |stack|
+ stack.push monitor_class_1, false, { message: 'monitor_1_text' }, max_strikes: 5
+ stack.push monitor_class_2, true, { message: 'monitor_2_text' }, max_strikes: 0
+ end
end
- expect(payloads).to eq([payload1, payload2])
- expect(thresholds).to eq([false, true])
- expect(strikes).to eq([false, true])
- expect(monitor_names).to eq([:monitor1, :monitor2])
+ include_examples 'executes monitors and returns correct results'
end
end
- context 'when same monitor class is configured to be used twice' do
+ context 'when same monitor class is configured twice' do
before do
- configuration.monitors.use monitor_class_1, max_strikes: 1
- configuration.monitors.use monitor_class_1, max_strikes: 1
+ configuration.monitors.push monitor_class_1, max_strikes: 1
+ configuration.monitors.push monitor_class_1, max_strikes: 1
end
it 'calls same monitor only once' do
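These hunks track an API rename from `monitors.use` to `monitors.push` and add a block-based registration form. A sketch of both styles, with placeholder monitor classes and strike counts standing in for the anonymous classes the spec builds:

    # HeapFragMonitor and MemGrowthMonitor are placeholders for real monitor classes.
    config = Gitlab::Memory::Watchdog::Configuration.new

    # Inline registration
    config.monitors.push HeapFragMonitor, max_strikes: 3

    # Block registration
    config.monitors do |stack|
      stack.push HeapFragMonitor, max_strikes: 3
      stack.push MemGrowthMonitor, max_strikes: 5
    end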
diff --git a/spec/lib/gitlab/memory/watchdog/configurator_spec.rb b/spec/lib/gitlab/memory/watchdog/configurator_spec.rb
new file mode 100644
index 00000000000..e6f2d57e9e6
--- /dev/null
+++ b/spec/lib/gitlab/memory/watchdog/configurator_spec.rb
@@ -0,0 +1,199 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'prometheus/client'
+require 'sidekiq'
+require_dependency 'gitlab/cluster/lifecycle_events'
+
+RSpec.describe Gitlab::Memory::Watchdog::Configurator do
+ shared_examples 'as configurator' do |handler_class, sleep_time_env, sleep_time|
+ it 'configures the correct handler' do
+ configurator.call(configuration)
+
+ expect(configuration.handler).to be_an_instance_of(handler_class)
+ end
+
+ it 'configures the correct logger' do
+ configurator.call(configuration)
+
+ expect(configuration.logger).to eq(logger)
+ end
+
+ context 'when sleep_time_seconds is not passed through the environment' do
+ let(:sleep_time_seconds) { sleep_time }
+
+ it 'configures the correct sleep time' do
+ configurator.call(configuration)
+
+ expect(configuration.sleep_time_seconds).to eq(sleep_time_seconds)
+ end
+ end
+
+ context 'when sleep_time_seconds is passed through the environment' do
+ let(:sleep_time_seconds) { sleep_time - 1 }
+
+ before do
+ stub_env(sleep_time_env, sleep_time - 1)
+ end
+
+ it 'configures the correct sleep time' do
+ configurator.call(configuration)
+
+ expect(configuration.sleep_time_seconds).to eq(sleep_time_seconds)
+ end
+ end
+ end
+
+ shared_examples 'as monitor configurator' do
+ it 'executes monitors and returns correct results' do
+ configurator.call(configuration)
+
+ payloads = {}
+ configuration.monitors.call_each do |result|
+ payloads[result.monitor_name] = result.payload
+ end
+
+ expect(payloads).to eq(expected_payloads)
+ end
+ end
+
+ let(:configuration) { Gitlab::Memory::Watchdog::Configuration.new }
+
+ # In tests, the Puma constant does not exist so we cannot use a verified double.
+ # rubocop: disable RSpec/VerifiedDoubles
+ describe '.configure_for_puma' do
+ let(:logger) { Gitlab::AppLogger }
+ let(:puma) do
+ Class.new do
+ def self.cli_config
+ Struct.new(:options).new
+ end
+ end
+ end
+
+ subject(:configurator) { described_class.configure_for_puma }
+
+ def stub_prometheus_metrics
+ gauge = instance_double(::Prometheus::Client::Gauge)
+ allow(Gitlab::Metrics).to receive(:gauge).and_return(gauge)
+ allow(gauge).to receive(:set)
+ end
+
+ before do
+ stub_const('Puma', puma)
+ stub_const('Puma::Cluster::WorkerHandle', double.as_null_object)
+ stub_prometheus_metrics
+ end
+
+ it_behaves_like 'as configurator',
+ Gitlab::Memory::Watchdog::PumaHandler,
+ 'GITLAB_MEMWD_SLEEP_TIME_SEC',
+ 60
+
+ context 'with DISABLE_PUMA_WORKER_KILLER set to true' do
+ let(:primary_memory) { 2048 }
+ let(:worker_memory) { max_mem_growth * primary_memory + 1 }
+ let(:expected_payloads) do
+ {
+ heap_fragmentation: {
+ message: 'heap fragmentation limit exceeded',
+ memwd_cur_heap_frag: max_heap_fragmentation + 0.1,
+ memwd_max_heap_frag: max_heap_fragmentation,
+ memwd_max_strikes: max_strikes,
+ memwd_cur_strikes: 1
+
+ },
+ unique_memory_growth: {
+ message: 'memory limit exceeded',
+ memwd_uss_bytes: worker_memory,
+ memwd_ref_uss_bytes: primary_memory,
+ memwd_max_uss_bytes: max_mem_growth * primary_memory,
+ memwd_max_strikes: max_strikes,
+ memwd_cur_strikes: 1
+ }
+ }
+ end
+
+ before do
+ stub_env('DISABLE_PUMA_WORKER_KILLER', true)
+ allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(max_heap_fragmentation + 0.1)
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).and_return({ uss: worker_memory })
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(
+ pid: Gitlab::Cluster::PRIMARY_PID
+ ).and_return({ uss: primary_memory })
+ end
+
+ context 'when settings are set via environment variables' do
+ let(:max_heap_fragmentation) { 0.4 }
+ let(:max_mem_growth) { 4.0 }
+ let(:max_strikes) { 4 }
+
+ before do
+ stub_env('GITLAB_MEMWD_MAX_HEAP_FRAG', 0.4)
+ stub_env('GITLAB_MEMWD_MAX_MEM_GROWTH', 4.0)
+ stub_env('GITLAB_MEMWD_MAX_STRIKES', 4)
+ end
+
+ it_behaves_like 'as monitor configurator'
+ end
+
+ context 'when settings are not set via environment variables' do
+ let(:max_heap_fragmentation) { 0.5 }
+ let(:max_mem_growth) { 3.0 }
+ let(:max_strikes) { 5 }
+
+ it_behaves_like 'as monitor configurator'
+ end
+ end
+
+ context 'with DISABLE_PUMA_WORKER_KILLER set to false' do
+ let(:expected_payloads) do
+ {
+ rss_memory_limit: {
+ message: 'rss memory limit exceeded',
+ memwd_rss_bytes: memory_limit + 1,
+ memwd_max_rss_bytes: memory_limit,
+ memwd_max_strikes: max_strikes,
+ memwd_cur_strikes: 1
+ }
+ }
+ end
+
+ before do
+ stub_env('DISABLE_PUMA_WORKER_KILLER', false)
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return({ total: memory_limit + 1 })
+ end
+
+ context 'when settings are set via environment variables' do
+ let(:memory_limit) { 1300 }
+ let(:max_strikes) { 4 }
+
+ before do
+ stub_env('PUMA_WORKER_MAX_MEMORY', 1300)
+ stub_env('GITLAB_MEMWD_MAX_STRIKES', 4)
+ end
+
+ it_behaves_like 'as monitor configurator'
+ end
+
+ context 'when settings are not set via environment variables' do
+ let(:memory_limit) { 1200 }
+ let(:max_strikes) { 5 }
+
+ it_behaves_like 'as monitor configurator'
+ end
+ end
+ end
+ # rubocop: enable RSpec/VerifiedDoubles
+
+ describe '.configure_for_sidekiq' do
+ let(:logger) { ::Sidekiq.logger }
+
+ subject(:configurator) { described_class.configure_for_sidekiq }
+
+ it_behaves_like 'as configurator',
+ Gitlab::Memory::Watchdog::TermProcessHandler,
+ 'SIDEKIQ_MEMORY_KILLER_CHECK_INTERVAL',
+ 3
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog/monitor/rss_memory_limit_spec.rb b/spec/lib/gitlab/memory/watchdog/monitor/rss_memory_limit_spec.rb
new file mode 100644
index 00000000000..9e25cfda782
--- /dev/null
+++ b/spec/lib/gitlab/memory/watchdog/monitor/rss_memory_limit_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'support/shared_examples/lib/gitlab/memory/watchdog/monitor_result_shared_examples'
+
+RSpec.describe Gitlab::Memory::Watchdog::Monitor::RssMemoryLimit do
+ let(:memory_limit) { 2048 }
+ let(:worker_memory) { 1024 }
+
+ subject(:monitor) do
+ described_class.new(memory_limit: memory_limit)
+ end
+
+ before do
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return({ total: worker_memory })
+ end
+
+ describe '#call' do
+ context 'when process exceeds threshold' do
+ let(:worker_memory) { memory_limit + 1 }
+ let(:payload) do
+ {
+ message: 'rss memory limit exceeded',
+ memwd_rss_bytes: worker_memory,
+ memwd_max_rss_bytes: memory_limit
+ }
+ end
+
+ include_examples 'returns Watchdog Monitor result', threshold_violated: true
+ end
+
+ context 'when process does not exceed threshold' do
+ let(:worker_memory) { memory_limit - 1 }
+ let(:payload) { {} }
+
+ include_examples 'returns Watchdog Monitor result', threshold_violated: false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb
index 84e9a577afb..5d9599d6eab 100644
--- a/spec/lib/gitlab/memory/watchdog_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog_spec.rb
@@ -60,14 +60,16 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures do
describe '#call' do
before do
stub_prometheus_metrics
- allow(Gitlab::Metrics::System).to receive(:memory_usage_rss).at_least(:once).and_return(1024)
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_rss).at_least(:once).and_return(
+ total: 1024
+ )
allow(::Prometheus::PidProvider).to receive(:worker_id).and_return('worker_1')
watchdog.configure do |config|
config.handler = handler
config.logger = logger
config.sleep_time_seconds = sleep_time_seconds
- config.monitors.use monitor_class, threshold_violated, payload, max_strikes: max_strikes
+ config.monitors.push monitor_class, threshold_violated, payload, max_strikes: max_strikes
end
allow(handler).to receive(:call).and_return(true)
@@ -203,8 +205,8 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures do
config.handler = handler
config.logger = logger
config.sleep_time_seconds = sleep_time_seconds
- config.monitors.use monitor_class, threshold_violated, payload, max_strikes: max_strikes
- config.monitors.use monitor_class, threshold_violated, payload, max_strikes: max_strikes
+ config.monitors.push monitor_class, threshold_violated, payload, max_strikes: max_strikes
+ config.monitors.push monitor_class, threshold_violated, payload, max_strikes: max_strikes
end
end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
index 50cfa6b64ea..4f437e57600 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
@@ -70,8 +70,8 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::CheckResult do
let(:payload) { { test: 'test' } }
let(:hash) do
{
- 'status' => status,
- 'payload' => payload
+ status: status,
+ payload: payload
}
end
diff --git a/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
index 2471faf76b2..787ac2874d3 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitla
subject(:redis_interface) { described_class.new }
let(:merge_check) { double(cache_key: '13') }
- let(:result_hash) { { 'test' => 'test' } }
+ let(:result_hash) { { test: 'test' } }
let(:expected_key) { "#{merge_check.cache_key}:#{described_class::VERSION}" }
describe '#save_check' do
diff --git a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
index 0e8b598730c..e4211c6dfd7 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
@@ -10,15 +10,15 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::ResultsStore do
let(:merge_request) { double }
describe '#read' do
- let(:result_hash) { { 'status' => 'success', 'payload' => {} } }
+ let(:result_hash) { { status: 'success', payload: {} } }
it 'calls #retrieve_check on the interface' do
expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(result_hash)
cached_result = results_store.read(merge_check: merge_check)
- expect(cached_result.status).to eq(result_hash['status'].to_sym)
- expect(cached_result.payload).to eq(result_hash['payload'])
+ expect(cached_result.status).to eq(result_hash[:status].to_sym)
+ expect(cached_result.payload).to eq(result_hash[:payload])
end
context 'when #retrieve_check returns nil' do
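The mergeability hunks above switch the cached result hash from string keys to symbol keys, both when persisting and when reading back. A short sketch of the read side implied by the spec (the payload contents are placeholders):

    # Symbol-keyed cache payload, per the spec changes above.
    result_hash = { status: 'success', payload: {} }

    status  = result_hash[:status].to_sym # => :success
    payload = result_hash[:payload]       # => {}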
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index 730a31346d7..f922eff2980 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store
it_behaves_like 'valid dashboard service response'
end
- context 'when the self monitoring dashboard is specified' do
+ context 'when the self-monitoring dashboard is specified' do
let(:dashboard_path) { self_monitoring_dashboard_path }
it_behaves_like 'valid dashboard service response'
@@ -181,7 +181,7 @@ RSpec.describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store
end
end
- context 'when the project is self monitoring' do
+ context 'when the project is self-monitoring' do
let(:self_monitoring_dashboard) do
{
path: self_monitoring_dashboard_path,
@@ -199,7 +199,7 @@ RSpec.describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store
stub_application_setting(self_monitoring_project_id: project.id)
end
- it 'includes self monitoring and project dashboards' do
+ it 'includes self-monitoring and project dashboards' do
project_dashboard = {
path: dashboard_path,
display_name: 'test.yml',
diff --git a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
index f3c8209e0b6..b41b51f53c3 100644
--- a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::Metrics::Dashboard::ServiceSelector do
end
end
- context 'when the path is for the self monitoring dashboard' do
+ context 'when the path is for the self-monitoring dashboard' do
let(:arguments) { { dashboard_path: self_monitoring_dashboard_path } }
it { is_expected.to be Metrics::Dashboard::SelfMonitoringDashboardService }
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
index 830d43169a9..d49200f87cc 100644
--- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
'url' => url,
'namespace' => 'namespace1',
'project' => 'project1',
- 'environment' => "#{environment_id}",
+ 'environment' => environment_id.to_s,
'query' => "?dashboard=config%2Fprometheus%2Fcommon_metrics.yml&environment=#{environment_id}&group=awesome+group&start=2019-08-02T05%3A43%3A09.000Z",
'anchor' => '#title'
}
diff --git a/spec/lib/gitlab/metrics/global_search_slis_spec.rb b/spec/lib/gitlab/metrics/global_search_slis_spec.rb
index 0c09cf6dd71..c10d83664ea 100644
--- a/spec/lib/gitlab/metrics/global_search_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/global_search_slis_spec.rb
@@ -47,10 +47,10 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
describe '#record_apdex' do
where(:search_type, :code_search, :duration_target) do
- 'basic' | false | 7.031
- 'basic' | true | 21.903
- 'advanced' | false | 4.865
- 'advanced' | true | 13.546
+ 'basic' | false | 8.812
+ 'basic' | true | 27.538
+ 'advanced' | false | 2.452
+ 'advanced' | true | 15.52
end
with_them do
diff --git a/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb b/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
new file mode 100644
index 00000000000..58740278425
--- /dev/null
+++ b/spec/lib/gitlab/metrics/loose_foreign_keys_slis_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Metrics::LooseForeignKeysSlis do
+ # This needs to be dynamic because db_config_names depends on
+ # config/database.yml and the specs need to work for all configurations. That
+ # means this assertion is a copy of the implementation.
+ let(:possible_labels) do
+ ::Gitlab::Database.db_config_names.map do |db_config_name|
+ {
+ db_config_name: db_config_name,
+ feature_category: :database
+ }
+ end
+ end
+
+ describe '#initialize_slis!' do
+ it 'initializes Apdex and ErrorRate SLIs for loose_foreign_key_clean_ups' do
+ expect(::Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(
+ :loose_foreign_key_clean_ups,
+ possible_labels
+ )
+
+ expect(::Gitlab::Metrics::Sli::ErrorRate).to receive(:initialize_sli).with(
+ :loose_foreign_key_clean_ups,
+ possible_labels
+ )
+
+ described_class.initialize_slis!
+ end
+ end
+
+ describe '#record_apdex' do
+ context 'with success: true' do
+ it 'increments the loose_foreign_key_clean_ups Apdex as a success' do
+ expect(Gitlab::Metrics::Sli::Apdex[:loose_foreign_key_clean_ups]).to receive(:increment).with(
+ labels: { feature_category: :database, db_config_name: 'main' },
+ success: true
+ )
+
+ described_class.record_apdex(success: true, db_config_name: 'main')
+ end
+ end
+
+ context 'with success: false' do
+ it 'increments the loose_foreign_key_clean_ups Apdex as not a success' do
+ expect(Gitlab::Metrics::Sli::Apdex[:loose_foreign_key_clean_ups]).to receive(:increment).with(
+ labels: { feature_category: :database, db_config_name: 'main' },
+ success: false
+ )
+
+ described_class.record_apdex(success: false, db_config_name: 'main')
+ end
+ end
+ end
+
+ describe '#record_error_rate' do
+ context 'with error: true' do
+ it 'increments the loose_foreign_key_clean_ups ErrorRate as an error' do
+ expect(Gitlab::Metrics::Sli::ErrorRate[:loose_foreign_key_clean_ups]).to receive(:increment).with(
+ labels: { feature_category: :database, db_config_name: 'main' },
+ error: true
+ )
+
+ described_class.record_error_rate(error: true, db_config_name: 'main')
+ end
+ end
+
+ context 'with error: false' do
+ it 'increments the loose_foreign_key_clean_ups ErrorRate as not an error' do
+ expect(Gitlab::Metrics::Sli::ErrorRate[:loose_foreign_key_clean_ups]).to receive(:increment).with(
+ labels: { feature_category: :database, db_config_name: 'main' },
+ error: false
+ )
+
+ described_class.record_error_rate(error: false, db_config_name: 'main')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index 6aa89c7cb05..091f35bfbcc 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -24,47 +24,22 @@ RSpec.describe Gitlab::Metrics::MethodCall do
allow(method_call).to receive(:above_threshold?).and_return(true)
end
- context 'prometheus instrumentation is enabled' do
- before do
- stub_feature_flags(prometheus_metrics_method_instrumentation: true)
- end
-
- around do |example|
- freeze_time do
- example.run
- end
- end
-
- it 'metric is not a NullMetric' do
- method_call.measure { 'foo' }
- expect(::Gitlab::Metrics::WebTransaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).not_to be_instance_of(Gitlab::Metrics::NullMetric)
- end
-
- it 'observes the performance of the supplied block' do
- expect(transaction)
- .to receive(:observe).with(:gitlab_method_call_duration_seconds, be_a_kind_of(Numeric), { method: "#bar", module: :Foo })
-
- method_call.measure { 'foo' }
+ around do |example|
+ freeze_time do
+ example.run
end
end
- context 'prometheus instrumentation is disabled' do
- before do
- stub_feature_flags(prometheus_metrics_method_instrumentation: false)
- end
-
- it 'observes the performance of the supplied block' do
- expect(transaction)
- .to receive(:observe).with(:gitlab_method_call_duration_seconds, be_a_kind_of(Numeric), { method: "#bar", module: :Foo })
-
- method_call.measure { 'foo' }
- end
+ it 'metric is not a NullMetric' do
+ method_call.measure { 'foo' }
+ expect(::Gitlab::Metrics::WebTransaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).not_to be_instance_of(Gitlab::Metrics::NullMetric)
+ end
- it 'observes using NullMetric' do
- method_call.measure { 'foo' }
+ it 'observes the performance of the supplied block' do
+ expect(transaction)
+ .to receive(:observe).with(:gitlab_method_call_duration_seconds, be_a_kind_of(Numeric), { method: "#bar", module: :Foo })
- expect(::Gitlab::Metrics::WebTransaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).to be_instance_of(Gitlab::Metrics::NullMetric)
- end
+ method_call.measure { 'foo' }
end
end
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index b1566ffa7b4..8c46c881ef0 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -35,14 +35,30 @@ RSpec.describe Gitlab::Metrics::Samplers::RubySampler do
end
describe '#sample' do
- it 'adds a metric containing the process resident memory bytes' do
- expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return(9000)
+ it 'adds a metric containing the process total resident memory bytes' do
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return({ total: 9000 })
expect(sampler.metrics[:process_resident_memory_bytes]).to receive(:set).with({}, 9000)
sampler.sample
end
+ it 'adds a metric containing the process anonymous resident memory bytes' do
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return({ anon: 9000 })
+
+ expect(sampler.metrics[:process_resident_anon_memory_bytes]).to receive(:set).with({}, 9000)
+
+ sampler.sample
+ end
+
+ it 'adds a metric containing the process file backed resident memory bytes' do
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return({ file: 9000 })
+
+ expect(sampler.metrics[:process_resident_file_memory_bytes]).to receive(:set).with({}, 9000)
+
+ sampler.sample
+ end
+
it 'adds a metric containing the process unique and proportional memory bytes' do
expect(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).and_return(uss: 9000, pss: 10_000)
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
index b86469eacd1..e4f53ab3f49 100644
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ b/spec/lib/gitlab/metrics/system_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Gitlab::Metrics::System do
VmHWM: 2468 kB
VmRSS: 2468 kB
RssAnon: 260 kB
+ RssFile: 1024 kB
SNIP
end
@@ -132,18 +133,26 @@ RSpec.describe Gitlab::Metrics::System do
describe '.memory_usage_rss' do
context 'without PID' do
- it "returns the current process' resident set size (RSS) in bytes" do
+ it "returns a hash containing RSS metrics in bytes for current process" do
mock_existing_proc_file('/proc/self/status', proc_status)
- expect(described_class.memory_usage_rss).to eq(2527232)
+ expect(described_class.memory_usage_rss).to eq(
+ total: 2527232,
+ anon: 266240,
+ file: 1048576
+ )
end
end
context 'with PID' do
- it "returns the given process' resident set size (RSS) in bytes" do
+ it "returns a hash containing RSS metrics in bytes for given process" do
mock_existing_proc_file('/proc/7/status', proc_status)
- expect(described_class.memory_usage_rss(pid: 7)).to eq(2527232)
+ expect(described_class.memory_usage_rss(pid: 7)).to eq(
+ total: 2527232,
+ anon: 266240,
+ file: 1048576
+ )
end
end
end
@@ -241,8 +250,12 @@ RSpec.describe Gitlab::Metrics::System do
end
describe '.memory_usage_rss' do
- it 'returns 0' do
- expect(described_class.memory_usage_rss).to eq(0)
+ it 'returns 0 for all components' do
+ expect(described_class.memory_usage_rss).to eq(
+ total: 0,
+ anon: 0,
+ file: 0
+ )
end
end
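Per the spec changes above, `Gitlab::Metrics::System.memory_usage_rss` now returns a hash broken down by component instead of a single integer, so callers index into it. A minimal sketch of the new shape:

    # Return shape inferred from the spec above; zeroes when /proc data is unavailable.
    rss = Gitlab::Metrics::System.memory_usage_rss
    # => { total: 2527232, anon: 266240, file: 1048576 }

    total_bytes = rss[:total]
    anon_bytes  = rss.fetch(:anon, 0)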
diff --git a/spec/lib/gitlab/observability_spec.rb b/spec/lib/gitlab/observability_spec.rb
new file mode 100644
index 00000000000..2b1d22d9019
--- /dev/null
+++ b/spec/lib/gitlab/observability_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Observability do
+ describe '.observability_url' do
+ let(:gitlab_url) { 'https://example.com' }
+
+ subject { described_class.observability_url }
+
+ before do
+ stub_config_setting(url: gitlab_url)
+ end
+
+ it { is_expected.to eq('https://observe.gitlab.com') }
+
+ context 'when on staging.gitlab.com' do
+ let(:gitlab_url) { Gitlab::Saas.staging_com_url }
+
+ it { is_expected.to eq('https://observe.staging.gitlab.com') }
+ end
+
+ context 'when overridden via ENV' do
+ let(:observe_url) { 'https://example.net' }
+
+ before do
+ stub_env('OVERRIDE_OBSERVABILITY_URL', observe_url)
+ end
+
+ it { is_expected.to eq(observe_url) }
+ end
+ end
+end
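The new spec above pins a default observability URL, a staging variant, and an environment-variable override. A sketch of the override path only, with example values (the spec stubs these via stub_env and stub_config_setting):

    # Example values only.
    ENV['OVERRIDE_OBSERVABILITY_URL'] = 'https://observe.example.test'

    Gitlab::Observability.observability_url
    # => 'https://observe.example.test'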
diff --git a/spec/lib/gitlab/octokit/middleware_spec.rb b/spec/lib/gitlab/octokit/middleware_spec.rb
index 92e424978ff..7bce0788327 100644
--- a/spec/lib/gitlab/octokit/middleware_spec.rb
+++ b/spec/lib/gitlab/octokit/middleware_spec.rb
@@ -66,5 +66,13 @@ RSpec.describe Gitlab::Octokit::Middleware do
it_behaves_like 'Public URL'
end
end
+
+ context 'when a non HTTP/HTTPS URL is provided' do
+ let(:env) { { url: 'ssh://172.16.0.0' } }
+
+ it 'raises an error' do
+ expect { middleware.call(env) }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
index 0bafd436bd0..b5ed583b1f1 100644
--- a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
+++ b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
@@ -99,7 +99,7 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
before do
allow(request_context).to receive(:request).and_return(fake_request)
- allow(finder).to receive(:is_a?).with(BranchesFinder) { true }
+ allow(BranchesFinder).to receive(:===).with(finder).and_return(true)
expect(finder).to receive(:execute).with(gitaly_pagination: true).and_return(branches)
end
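The pager spec now stubs case equality on `BranchesFinder` rather than stubbing `is_a?` on the double itself, which leaves the double untouched while still satisfying `case`/`===`-style type checks. A small RSpec sketch of the same idea with a hypothetical class:

    # Hypothetical example of the ===-stubbing pattern used above.
    finder = double('finder')
    allow(SomeFinder).to receive(:===).with(finder).and_return(true)

    kind =
      case finder
      when SomeFinder then :keyset # matches because SomeFinder.===(finder) is stubbed
      else :offset
      end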
diff --git a/spec/lib/gitlab/pagination_delegate_spec.rb b/spec/lib/gitlab/pagination_delegate_spec.rb
new file mode 100644
index 00000000000..7693decd881
--- /dev/null
+++ b/spec/lib/gitlab/pagination_delegate_spec.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::PaginationDelegate do
+ context 'when there is no data' do
+ let(:delegate) do
+ described_class.new(page: 1,
+ per_page: 10,
+ count: 0)
+ end
+
+ it 'shows the correct total count' do
+ expect(delegate.total_count).to eq(0)
+ end
+
+ it 'shows the correct total pages' do
+ expect(delegate.total_pages).to eq(0)
+ end
+
+ it 'shows the correct next page' do
+ expect(delegate.next_page).to be_nil
+ end
+
+ it 'shows the correct previous page' do
+ expect(delegate.prev_page).to be_nil
+ end
+
+ it 'shows the correct current page' do
+ expect(delegate.current_page).to eq(1)
+ end
+
+ it 'shows the correct limit value' do
+ expect(delegate.limit_value).to eq(10)
+ end
+
+ it 'shows the correct first page' do
+ expect(delegate.first_page?).to be true
+ end
+
+ it 'shows the correct last page' do
+ expect(delegate.last_page?).to be true
+ end
+
+ it 'shows the correct offset' do
+ expect(delegate.offset).to eq(0)
+ end
+ end
+
+ context 'with data' do
+ let(:delegate) do
+ described_class.new(page: 5,
+ per_page: 100,
+ count: 1000)
+ end
+
+ it 'shows the correct total count' do
+ expect(delegate.total_count).to eq(1000)
+ end
+
+ it 'shows the correct total pages' do
+ expect(delegate.total_pages).to eq(10)
+ end
+
+ it 'shows the correct next page' do
+ expect(delegate.next_page).to eq(6)
+ end
+
+ it 'shows the correct previous page' do
+ expect(delegate.prev_page).to eq(4)
+ end
+
+ it 'shows the correct current page' do
+ expect(delegate.current_page).to eq(5)
+ end
+
+ it 'shows the correct limit value' do
+ expect(delegate.limit_value).to eq(100)
+ end
+
+ it 'shows the correct first page' do
+ expect(delegate.first_page?).to be false
+ end
+
+ it 'shows the correct last page' do
+ expect(delegate.last_page?).to be false
+ end
+
+ it 'shows the correct offset' do
+ expect(delegate.offset).to eq(400)
+ end
+ end
+
+ context 'for last page' do
+ let(:delegate) do
+ described_class.new(page: 10,
+ per_page: 100,
+ count: 1000)
+ end
+
+ it 'shows the correct total count' do
+ expect(delegate.total_count).to eq(1000)
+ end
+
+ it 'shows the correct total pages' do
+ expect(delegate.total_pages).to eq(10)
+ end
+
+ it 'shows the correct next page' do
+ expect(delegate.next_page).to be_nil
+ end
+
+ it 'shows the correct previous page' do
+ expect(delegate.prev_page).to eq(9)
+ end
+
+ it 'shows the correct current page' do
+ expect(delegate.current_page).to eq(10)
+ end
+
+ it 'shows the correct limit value' do
+ expect(delegate.limit_value).to eq(100)
+ end
+
+ it 'shows the correct first page' do
+ expect(delegate.first_page?).to be false
+ end
+
+ it 'shows the correct last page' do
+ expect(delegate.last_page?).to be true
+ end
+
+ it 'shows the correct offset' do
+ expect(delegate.offset).to eq(900)
+ end
+ end
+
+ context 'with limits and defaults' do
+ it 'has a maximum limit per page' do
+ expect(described_class.new(page: nil,
+ per_page: 1000,
+ count: 0).limit_value).to eq(described_class::MAX_PER_PAGE)
+ end
+
+ it 'has a default per page' do
+ expect(described_class.new(page: nil,
+ per_page: nil,
+ count: 0).limit_value).to eq(described_class::DEFAULT_PER_PAGE)
+ end
+
+ it 'has a maximum page' do
+ expect(described_class.new(page: 100,
+ per_page: 10,
+ count: 1).current_page).to eq(1)
+ end
+ end
+end
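A usage sketch consistent with the behaviour the new spec above asserts (derived totals, neighbouring pages, offsets, and page clamping):

    delegate = Gitlab::PaginationDelegate.new(page: 2, per_page: 25, count: 120)

    delegate.total_pages # => 5
    delegate.next_page   # => 3
    delegate.prev_page   # => 1
    delegate.offset      # => 25
    delegate.last_page?  # => false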
diff --git a/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb b/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb
index 05cdc5bb79b..d42cef8bcba 100644
--- a/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb
+++ b/spec/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::PerformanceBar::RedisAdapterWhenPeekEnabled do
it 'stores request id and enqueues stats job' do
expect_to_obtain_exclusive_lease(GitlabPerformanceBarStatsWorker::LEASE_KEY, uuid)
expect(GitlabPerformanceBarStatsWorker).to receive(:perform_in).with(GitlabPerformanceBarStatsWorker::WORKER_DELAY, uuid)
- expect(client).to receive(:sadd).with(GitlabPerformanceBarStatsWorker::STATS_KEY, uuid)
+ expect(client).to receive(:sadd?).with(GitlabPerformanceBarStatsWorker::STATS_KEY, uuid)
expect(client).to receive(:expire).with(GitlabPerformanceBarStatsWorker::STATS_KEY, GitlabPerformanceBarStatsWorker::STATS_KEY_EXPIRE)
peek_adapter.new(client).save('foo')
@@ -56,7 +56,7 @@ RSpec.describe Gitlab::PerformanceBar::RedisAdapterWhenPeekEnabled do
it 'stores request id but does not enqueue any job' do
expect(GitlabPerformanceBarStatsWorker).not_to receive(:perform_in)
- expect(client).to receive(:sadd).with(GitlabPerformanceBarStatsWorker::STATS_KEY, uuid)
+ expect(client).to receive(:sadd?).with(GitlabPerformanceBarStatsWorker::STATS_KEY, uuid)
peek_adapter.new(client).save('foo')
end
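The adapter spec now expects `sadd?`, the boolean variant that recent redis-rb releases provide, while `sadd` itself returns an integer count of added members. A small sketch, assuming a plain redis-rb client:

    require 'redis'

    redis = Redis.new # placeholder connection

    redis.sadd?('peek:requests', 'request-uuid-1') # => true if newly added, false if already present
    redis.sadd('peek:requests', %w[a b c])         # => integer count of members actually added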
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index 630369977ff..998fff12e94 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -12,6 +12,20 @@ RSpec.describe Gitlab::ProjectTemplate do
end
end
+ describe '#project_host' do
+ context "when `preview` is valid" do
+ subject { described_class.new('name', 'title', 'description', 'https://gitlab.com/some/project/path').project_host }
+
+ it { is_expected.to eq 'https://gitlab.com' }
+ end
+
+ context "when `preview` is `nil`" do
+ subject { described_class.new('name', 'title', 'description', nil).project_host }
+
+ it { is_expected.to eq nil }
+ end
+ end
+
describe '#project_path' do
subject { described_class.new('name', 'title', 'description', 'https://gitlab.com/some/project/path').project_path }
diff --git a/spec/lib/gitlab/qa_spec.rb b/spec/lib/gitlab/qa_spec.rb
new file mode 100644
index 00000000000..c26f4c89fec
--- /dev/null
+++ b/spec/lib/gitlab/qa_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Qa do
+ describe '.request?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:dot_com, :request_user_agent, :qa_user_agent, :result) do
+ false | 'qa_user_agent' | 'qa_user_agent' | false
+ true | nil | 'qa_user_agent' | false
+ true | '' | 'qa_user_agent' | false
+ true | 'qa_user_agent' | '' | false
+ true | 'qa_user_agent' | nil | false
+ true | 'qa_user_agent' | 'qa_user_agent' | true
+ end
+
+ with_them do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ stub_env('GITLAB_QA_USER_AGENT', qa_user_agent)
+ end
+
+ subject { described_class.request?(request_user_agent) }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/query_limiting/transaction_spec.rb b/spec/lib/gitlab/query_limiting/transaction_spec.rb
index d8eb2040ccc..c11d0a7c18d 100644
--- a/spec/lib/gitlab/query_limiting/transaction_spec.rb
+++ b/spec/lib/gitlab/query_limiting/transaction_spec.rb
@@ -91,6 +91,9 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
SELECT a.attname, a.other_column
FROM pg_attribute a
SQL
+ transaction.increment(
+ "SELECT a.attnum, a.attname\nFROM pg_attribute a\nWHERE a.attrelid = 10605202\nAND a.attnum IN (3)\n"
+ )
end.not_to change(transaction, :count)
end
end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index 8b73b5e03c0..207fe28e84e 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -127,19 +127,15 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
before(:all) do
- primary_store.multi do |multi|
- multi.set(key1, value1)
- multi.set(key2, value2)
- multi.sadd(skey, value1)
- multi.sadd(skey, value2)
- end
+ primary_store.set(key1, value1)
+ primary_store.set(key2, value2)
+ primary_store.sadd?(skey, value1)
+ primary_store.sadd?(skey, value2)
- secondary_store.multi do |multi|
- multi.set(key1, value1)
- multi.set(key2, value2)
- multi.sadd(skey, value1)
- multi.sadd(skey, value2)
- end
+ secondary_store.set(key1, value1)
+ secondary_store.set(key2, value2)
+ secondary_store.sadd?(skey, value1)
+ secondary_store.sadd?(skey, value2)
end
RSpec.shared_examples_for 'reads correct value' do
@@ -211,126 +207,86 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
with_them do
- describe "#{name}" do
+ describe name.to_s do
before do
allow(primary_store).to receive(name).and_call_original
allow(secondary_store).to receive(name).and_call_original
end
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- context 'when reading from the primary is successful' do
- it 'returns the correct value' do
- expect(primary_store).to receive(name).with(*args).and_call_original
-
- subject
- end
-
- it 'does not execute on the secondary store' do
- expect(secondary_store).not_to receive(name)
+ context 'when reading from the primary is successful' do
+ it 'returns the correct value' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
- subject
- end
-
- include_examples 'reads correct value'
+ subject
end
- context 'when reading from primary instance is raising an exception' do
- before do
- allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
- allow(Gitlab::ErrorTracking).to receive(:log_exception)
- end
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
- it 'logs the exception' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
- hash_including(:multi_store_error_message, instance_name: instance_name, command_name: name))
+ subject
+ end
- subject
- end
+ include_examples 'reads correct value'
+ end
- include_examples 'fallback read from the secondary store'
+ context 'when reading from primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
- context 'when reading from primary instance return no value' do
- before do
- allow(primary_store).to receive(name).and_return(nil)
- end
+ it 'logs the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, instance_name: instance_name, command_name: name))
- include_examples 'fallback read from the secondary store'
+ subject
end
- context 'when the command is executed within pipelined block' do
- subject do
- multi_store.pipelined do |pipeline|
- pipeline.send(name, *args)
- end
- end
+ include_examples 'fallback read from the secondary store'
+ end
- it 'is executed only 1 time on primary and secondary instance' do
- expect(primary_store).to receive(:pipelined).and_call_original
- expect(secondary_store).to receive(:pipelined).and_call_original
+ context 'when reading from primary instance return no value' do
+ before do
+ allow(primary_store).to receive(name).and_return(nil)
+ end
- 2.times do
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- expect(pipeline).to receive(name).with(*args).once.and_call_original
- end
- end
+ include_examples 'fallback read from the secondary store'
+ end
- subject
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do |pipeline|
+ pipeline.send(name, *args)
end
end
- if params[:block]
- subject do
- multi_store.send(name, *args, &block)
- end
-
- context 'when block is provided' do
- it 'yields to the block' do
- expect(primary_store).to receive(name).and_yield(value)
+ it 'is executed only 1 time on primary and secondary instance' do
+ expect(primary_store).to receive(:pipelined).and_call_original
+ expect(secondary_store).to receive(:pipelined).and_call_original
- subject
+ 2.times do
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ expect(pipeline).to receive(name).with(*args).once.and_call_original
end
-
- include_examples 'reads correct value'
end
- end
- end
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ subject
end
+ end
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it_behaves_like 'secondary store'
+ if params[:block]
+ subject do
+ multi_store.send(name, *args, &block)
end
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'execute on the primary instance' do
- expect(primary_store).to receive(name).with(*args).and_call_original
+ context 'when block is provided' do
+ it 'yields to the block' do
+ expect(primary_store).to receive(name).and_yield(value)
subject
end
include_examples 'reads correct value'
-
- it 'does not execute on the secondary store' do
- expect(secondary_store).not_to receive(name)
-
- subject
- end
end
end
@@ -372,8 +328,9 @@ RSpec.describe Gitlab::Redis::MultiStore do
let_it_be(:skey) { "redis:set:key" }
let_it_be(:svalues1) { [value2, value1] }
let_it_be(:svalues2) { [value1] }
- let_it_be(:skey_value1) { [skey, value1] }
- let_it_be(:skey_value2) { [skey, value2] }
+ let_it_be(:skey_value1) { [skey, [value1]] }
+ let_it_be(:skey_value2) { [skey, [value2]] }
+ let_it_be(:script) { %(redis.call("set", "#{key1}", "#{value1}")) }
where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do
'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
@@ -383,25 +340,22 @@ RSpec.describe Gitlab::Redis::MultiStore do
'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
+ 'execute :eval command' | :eval | ref(:script) | ref(:value1) | :get | ref(:key1)
end
before do
primary_store.flushdb
secondary_store.flushdb
- primary_store.multi do |multi|
- multi.set(key2, value1)
- multi.sadd(skey, value1)
- end
+ primary_store.set(key2, value1)
+ primary_store.sadd?(skey, value1)
- secondary_store.multi do |multi|
- multi.set(key2, value1)
- multi.sadd(skey, value1)
- end
+ secondary_store.set(key2, value1)
+ secondary_store.sadd?(skey, value1)
end
with_them do
- describe "#{name}" do
+ describe name.to_s do
let(:expected_args) { args || no_args }
before do
@@ -409,100 +363,58 @@ RSpec.describe Gitlab::Redis::MultiStore do
allow(secondary_store).to receive(name).and_call_original
end
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- context 'when executing on primary instance is successful' do
- it 'executes on both primary and secondary redis store', :aggregate_errors do
- expect(primary_store).to receive(name).with(*expected_args).and_call_original
- expect(secondary_store).to receive(name).with(*expected_args).and_call_original
-
- subject
- end
-
- include_examples 'verify that store contains values', :primary_store
- include_examples 'verify that store contains values', :secondary_store
- end
-
- context 'when executing on the primary instance is raising an exception' do
- before do
- allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
- allow(Gitlab::ErrorTracking).to receive(:log_exception)
- end
-
- it 'logs the exception and execute on secondary instance', :aggregate_errors do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
- hash_including(:multi_store_error_message, command_name: name, instance_name: instance_name))
- expect(secondary_store).to receive(name).with(*expected_args).and_call_original
-
- subject
- end
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
- include_examples 'verify that store contains values', :secondary_store
+ subject
end
- context 'when the command is executed within pipelined block' do
- subject do
- multi_store.pipelined do |pipeline|
- pipeline.send(name, *args)
- end
- end
-
- it 'is executed only 1 time on each instance', :aggregate_errors do
- expect(primary_store).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- expect(pipeline).to receive(name).with(*expected_args).once.and_call_original
- end
-
- expect(secondary_store).to receive(:pipelined).and_call_original
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- expect(pipeline).to receive(name).with(*expected_args).once.and_call_original
- end
-
- subject
- end
-
- include_examples 'verify that store contains values', :primary_store
- include_examples 'verify that store contains values', :secondary_store
- end
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
end
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ context 'when executing on the primary instance is raising an exception' do
before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
+ it 'logs the exception and execute on secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, command_name: name, instance_name: instance_name))
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
- it 'executes only on the secondary redis store', :aggregate_errors do
- expect(secondary_store).to receive(name).with(*expected_args)
- expect(primary_store).not_to receive(name).with(*expected_args)
+ include_examples 'verify that store contains values', :secondary_store
+ end
- subject
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do |pipeline|
+ pipeline.send(name, *args)
end
-
- include_examples 'verify that store contains values', :secondary_store
end
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ it 'is executed only 1 time on each instance', :aggregate_errors do
+ expect(primary_store).to receive(:pipelined).and_call_original
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ expect(pipeline).to receive(name).with(*expected_args).once.and_call_original
end
- it 'executes only on the primary_redis redis store', :aggregate_errors do
- expect(primary_store).to receive(name).with(*expected_args)
- expect(secondary_store).not_to receive(name).with(*expected_args)
-
- subject
+ expect(secondary_store).to receive(:pipelined).and_call_original
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ expect(pipeline).to receive(name).with(*expected_args).once.and_call_original
end
- include_examples 'verify that store contains values', :primary_store
+ subject
end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
end
end
end
@@ -537,151 +449,109 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
end
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- context 'when executing on primary instance is successful' do
- it 'executes on both primary and secondary redis store', :aggregate_errors do
- expect(primary_store).to receive(name).and_call_original
- expect(secondary_store).to receive(name).and_call_original
-
- subject
- end
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).and_call_original
+ expect(secondary_store).to receive(name).and_call_original
- include_examples 'verify that store contains values', :primary_store
- include_examples 'verify that store contains values', :secondary_store
+ subject
end
- context 'when executing on the primary instance is raising an exception' do
- before do
- allow(primary_store).to receive(name).and_raise(StandardError)
- allow(Gitlab::ErrorTracking).to receive(:log_exception)
- end
-
- it 'logs the exception and execute on secondary instance', :aggregate_errors do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
- hash_including(:multi_store_error_message, command_name: name))
- expect(secondary_store).to receive(name).and_call_original
-
- subject
- end
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
- include_examples 'verify that store contains values', :secondary_store
+ context 'when executing on the primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
- describe 'return values from a pipelined command' do
- RSpec::Matchers.define :pipeline_diff_error_with_stacktrace do |message|
- match do |object|
- expect(object).to be_a(Gitlab::Redis::MultiStore::PipelinedDiffError)
- expect(object.backtrace).not_to be_nil
- expect(object.message).to eq(message)
- end
- end
+ it 'logs the exception and execute on secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, command_name: name))
+ expect(secondary_store).to receive(name).and_call_original
- subject do
- multi_store.send(name) do |redis|
- redis.get(key1)
- end
- end
-
- context 'when the value exists on both and are equal' do
- before do
- primary_store.set(key1, value1)
- secondary_store.set(key1, value1)
- end
+ subject
+ end
- it 'returns the value' do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ include_examples 'verify that store contains values', :secondary_store
+ end
- expect(subject).to eq([value1])
- end
+ describe 'return values from a pipelined command' do
+ RSpec::Matchers.define :pipeline_diff_error_with_stacktrace do |message|
+ match do |object|
+ expect(object).to be_a(Gitlab::Redis::MultiStore::PipelinedDiffError)
+ expect(object.backtrace).not_to be_nil
+ expect(object.message).to eq(message)
end
+ end
- context 'when the value exists on both but differ' do
- before do
- primary_store.set(key1, value1)
- secondary_store.set(key1, value2)
- end
-
- it 'returns the value from the secondary store, logging an error' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- pipeline_diff_error_with_stacktrace(
- 'Pipelined command executed on both stores successfully but results differ between them. ' \
- "Result from the primary: [#{value1.inspect}]. Result from the secondary: [#{value2.inspect}]."
- ),
- hash_including(command_name: name, instance_name: instance_name)
- ).and_call_original
- expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
-
- expect(subject).to eq([value2])
- end
+ subject do
+ multi_store.send(name) do |redis|
+ redis.get(key1)
end
+ end
- context 'when the value does not exist on the primary but it does on the secondary' do
- before do
- secondary_store.set(key1, value2)
- end
-
- it 'returns the value from the secondary store, logging an error' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- pipeline_diff_error_with_stacktrace(
- 'Pipelined command executed on both stores successfully but results differ between them. ' \
- "Result from the primary: [nil]. Result from the secondary: [#{value2.inspect}]."
- ),
- hash_including(command_name: name, instance_name: instance_name)
- )
- expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
-
- expect(subject).to eq([value2])
- end
+ context 'when the value exists on both and are equal' do
+ before do
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value1)
end
- context 'when the value does not exist in either' do
- it 'returns nil without logging an error' do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
- expect(counter).not_to receive(:increment)
+ it 'returns the value' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
- expect(subject).to eq([nil])
- end
+ expect(subject).to eq([value1])
end
end
- end
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ context 'when the value exists on both but differ' do
before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value2)
end
- it 'executes only on the secondary redis store', :aggregate_errors do
- expect(secondary_store).to receive(name)
- expect(primary_store).not_to receive(name)
-
- subject
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ pipeline_diff_error_with_stacktrace(
+ 'Pipelined command executed on both stores successfully but results differ between them. ' \
+ "Result from the primary: [#{value1.inspect}]. Result from the secondary: [#{value2.inspect}]."
+ ),
+ hash_including(command_name: name, instance_name: instance_name)
+ ).and_call_original
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
end
-
- include_examples 'verify that store contains values', :secondary_store
end
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ context 'when the value does not exist on the primary but it does on the secondary' do
before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ secondary_store.set(key1, value2)
end
- it 'executes only on the primary_redis redis store', :aggregate_errors do
- expect(primary_store).to receive(name)
- expect(secondary_store).not_to receive(name)
-
- subject
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ pipeline_diff_error_with_stacktrace(
+ 'Pipelined command executed on both stores successfully but results differ between them. ' \
+ "Result from the primary: [nil]. Result from the secondary: [#{value2.inspect}]."
+ ),
+ hash_including(command_name: name, instance_name: instance_name)
+ )
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
end
+ end
- include_examples 'verify that store contains values', :primary_store
+ context 'when the value does not exist in either' do
+ it 'returns nil without logging an error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(counter).not_to receive(:increment)
+
+ expect(subject).to eq([nil])
+ end
end
end
end
@@ -825,40 +695,8 @@ RSpec.describe Gitlab::Redis::MultiStore do
describe '#to_s' do
subject { multi_store.to_s }
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- it 'returns same value as primary_store' do
- is_expected.to eq(primary_store.to_s)
- end
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'returns same value as primary_store' do
- is_expected.to eq(primary_store.to_s)
- end
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it 'returns same value as primary_store' do
- is_expected.to eq(secondary_store.to_s)
- end
- end
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
end
end
@@ -869,24 +707,8 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
describe '#use_primary_and_secondary_stores?' do
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be true
- end
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
- end
+ it 'multi store is enabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be true
end
context 'with empty DB' do
@@ -911,24 +733,8 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
describe '#use_primary_store_as_default?' do
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_store_as_default?).to be true
- end
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_store_as_default?).to be false
- end
+ it 'uses the primary store as default' do
+ expect(multi_store.use_primary_store_as_default?).to be true
end
context 'with empty DB' do
diff --git a/spec/lib/gitlab/request_forgery_protection_spec.rb b/spec/lib/gitlab/request_forgery_protection_spec.rb
index a7b777cf4f2..10842173365 100644
--- a/spec/lib/gitlab/request_forgery_protection_spec.rb
+++ b/spec/lib/gitlab/request_forgery_protection_spec.rb
@@ -13,6 +13,12 @@ RSpec.describe Gitlab::RequestForgeryProtection, :allow_forgery_protection do
}
end
+ it 'logs to /dev/null' do
+ expect(ActiveSupport::Logger).to receive(:new).with(File::NULL)
+
+ described_class::Controller.new.logger
+ end
+
describe '.call' do
context 'when the request method is GET' do
before do
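
The new example above asserts that the forgery-protection controller builds its logger against File::NULL. A minimal sketch of that pattern, assuming ActiveSupport is available:

    require 'active_support'
    require 'active_support/logger'

    null_logger = ActiveSupport::Logger.new(File::NULL) # discards everything written to it
    null_logger.warn('CSRF token verification failed')  # produces no output anywhere
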
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 86640efed5a..181a911c667 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::Runtime do
before do
stub_const('::Sidekiq', sidekiq_type)
allow(sidekiq_type).to receive(:server?).and_return(true)
- allow(sidekiq_type).to receive(:options).and_return(concurrency: 2)
+ allow(sidekiq_type).to receive(:[]).with(:concurrency).and_return(2)
end
it_behaves_like "valid runtime", :sidekiq, 5
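
The updated stubs reflect Sidekiq 6.5's move from Sidekiq.options to indexing the configuration directly via Sidekiq[...]. A hedged helper sketch that reads concurrency under either API (only the two call forms shown in the stubs are assumed):

    def sidekiq_concurrency
      if Sidekiq.respond_to?(:[])
        Sidekiq[:concurrency].to_i          # Sidekiq >= 6.5 exposes config via #[]
      else
        Sidekiq.options[:concurrency].to_i  # older Sidekiq versions
      end
    end
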
diff --git a/spec/lib/gitlab/service_desk_email_spec.rb b/spec/lib/gitlab/service_desk_email_spec.rb
index 6667b61c02b..69569c0f194 100644
--- a/spec/lib/gitlab/service_desk_email_spec.rb
+++ b/spec/lib/gitlab/service_desk_email_spec.rb
@@ -1,39 +1,11 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::ServiceDeskEmail do
- describe '.enabled?' do
- context 'when service_desk_email is enabled and address is set' do
- before do
- stub_service_desk_email_setting(enabled: true, address: 'foo')
- end
+ let(:setting_name) { :service_desk_email }
- it 'returns true' do
- expect(described_class.enabled?).to be_truthy
- end
- end
-
- context 'when service_desk_email is disabled' do
- before do
- stub_service_desk_email_setting(enabled: false, address: 'foo')
- end
-
- it 'returns false' do
- expect(described_class.enabled?).to be_falsey
- end
- end
-
- context 'when service desk address is not set' do
- before do
- stub_service_desk_email_setting(enabled: true, address: nil)
- end
-
- it 'returns false' do
- expect(described_class.enabled?).to be_falsey
- end
- end
- end
+ it_behaves_like 'common email methods'
describe '.key_from_address' do
context 'when service desk address is set' do
@@ -78,10 +50,4 @@ RSpec.describe Gitlab::ServiceDeskEmail do
end
end
end
-
- context 'self.key_from_fallback_message_id' do
- it 'returns reply key' do
- expect(described_class.key_from_fallback_message_id('reply-key@localhost')).to eq('key')
- end
- end
end
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index c5b00afe672..5f72a3feba7 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -157,7 +157,7 @@ RSpec.describe Gitlab::SidekiqConfig do
allow(::Gitlab::SidekiqConfig::WorkerRouter)
.to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
- allow(Sidekiq).to receive(:options).and_return(queues: %w[default background_migration])
+ allow(Sidekiq).to receive(:[]).with(:queues).and_return(%w[default background_migration])
mappings = described_class.current_worker_queue_mappings
diff --git a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
index 62681b21756..8c9a1abba5a 100644
--- a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
stub_const("#{described_class}::CHECK_INTERVAL_SECONDS", check_interval_seconds)
stub_const("#{described_class}::GRACE_BALLOON_SECONDS", grace_balloon_seconds)
allow(Process).to receive(:getpgrp).and_return(pid)
- allow(Sidekiq).to receive(:options).and_return(timeout: 9)
+ allow(Sidekiq).to receive(:[]).with(:timeout).and_return(9)
end
it 'return true when everything is within limit', :aggregate_failures do
@@ -257,7 +257,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
before do
stub_const("#{described_class}::SHUTDOWN_TIMEOUT_SECONDS", shutdown_timeout_seconds)
stub_feature_flags(sidekiq_memory_killer_read_only_mode: false)
- allow(Sidekiq).to receive(:options).and_return(timeout: 9)
+ allow(Sidekiq).to receive(:[]).with(:timeout).and_return(9)
allow(memory_killer).to receive(:get_rss_kb).and_return(100)
allow(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200)
allow(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300)
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
index 4d12e4b3f6f..44c8df73463 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Client, :clean_gitlab_redis_queues do
+RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Client, :clean_gitlab_redis_queues,
+:clean_gitlab_redis_shared_state do
shared_context 'deduplication worker class' do |strategy, including_scheduled|
let(:worker_class) do
Class.new do
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index d240bf51e67..b6748d49739 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
let(:wal_locations) do
{
- main: '0/D525E3A8',
- ci: 'AB/12345'
+ 'main' => '0/D525E3A8',
+ 'ci' => 'AB/12345'
}
end
@@ -24,10 +24,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
"#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:duplicate:#{queue}:#{hash}"
end
- let(:deduplicated_flag_key) do
- "#{idempotency_key}:deduplicate_flag"
- end
-
describe '#schedule' do
shared_examples 'scheduling with deduplication class' do |strategy_class|
it 'calls schedule on the strategy' do
@@ -81,29 +77,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- shared_examples 'tracking duplicates in redis' do
+ shared_examples 'with Redis cookies' do
+ let(:cookie_key) { "#{idempotency_key}:cookie:v2" }
+ let(:cookie) { get_redis_msgpack(cookie_key) }
+
describe '#check!' do
context 'when there was no job in the queue yet' do
it { expect(duplicate_job.check!).to eq('123') }
shared_examples 'sets Redis keys with correct TTL' do
it "adds an idempotency key with correct ttl" do
- expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from([nil, -2])
- .to(['123', be_within(1).of(expected_ttl)])
- end
+ expected_cookie = {
+ 'jid' => '123',
+ 'offsets' => {},
+ 'wal_locations' => {},
+ 'existing_wal_locations' => wal_locations
+ }
- context 'when wal locations is not empty' do
- it "adds an existing wal locations key with correct ttl" do
- expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([nil, -2])
- .to([wal_locations[:main], be_within(1).of(expected_ttl)])
- .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
- .from([nil, -2])
- .to([wal_locations[:ci], be_within(1).of(expected_ttl)])
- end
+ duplicate_job.check!
+ expect(cookie).to eq(expected_cookie)
+ expect(redis_ttl(cookie_key)).to be_within(1).of(expected_ttl)
end
end
@@ -130,32 +123,23 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'when there was already a job with same arguments in the same queue' do
before do
- set_idempotency_key(idempotency_key, 'existing-key')
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
- end
+ set_idempotency_key(cookie_key, existing_cookie.to_msgpack)
end
- it { expect(duplicate_job.check!).to eq('existing-key') }
+ let(:existing_cookie) { { 'jid' => 'existing-jid' } }
- it "does not change the existing key's TTL" do
- expect { duplicate_job.check! }
- .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from(['existing-key', -1])
- end
+ it { expect(duplicate_job.check!).to eq('existing-jid') }
- it "does not change the existing wal locations key's TTL" do
+ it "does not change the existing key's TTL" do
expect { duplicate_job.check! }
- .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([wal_locations[:main], -1])
- .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
- .from([wal_locations[:ci], -1])
+ .not_to change { redis_ttl(cookie_key) }
+ .from(-1)
end
it 'sets the existing jid' do
duplicate_job.check!
- expect(duplicate_job.existing_jid).to eq('existing-key')
+ expect(duplicate_job.existing_jid).to eq('existing-jid')
end
end
end
@@ -166,115 +150,90 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
{ main: ::ActiveRecord::Base,
ci: ::ActiveRecord::Base })
- set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
- set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
+ with_redis { |r| r.set(cookie_key, initial_cookie.to_msgpack, ex: expected_ttl) }
# read existing_wal_locations
duplicate_job.check!
end
- context "when the key doesn't exists in redis" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
- end
+ let(:initial_cookie) do
+ {
+ 'jid' => 'foobar',
+ 'existing_wal_locations' => { 'main' => '0/D525E3A0', 'ci' => 'AB/12340' },
+ 'offsets' => {},
+ 'wal_locations' => {}
+ }
+ end
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ let(:expected_ttl) { 123 }
+ let(:new_wal) do
+ {
+ # offset is relative to `existing_wal`
+ 'main' => { location: '0/D525E3A8', offset: '8' },
+ 'ci' => { location: 'AB/12345', offset: '5' }
+ }
+ end
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ let(:wal_locations) { new_wal.transform_values { |v| v[:location] } }
- it 'stores a wal location to redis with an offset relative to existing wal location' do
- expect { duplicate_job.update_latest_wal_location! }
- .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from([])
- .to(new_wal_location_with_offset[:main])
- .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from([])
- .to(new_wal_location_with_offset[:ci])
- end
+ it 'stores a wal location to redis with an offset relative to existing wal location' do
+ duplicate_job.update_latest_wal_location!
+
+ expect(cookie['wal_locations']).to eq(wal_locations)
+ expect(cookie['offsets']).to eq(new_wal.transform_values { |v| v[:offset].to_i })
+ expect(redis_ttl(cookie_key)).to be_within(1).of(expected_ttl)
end
+ end
- context "when the key exists in redis" do
- before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
+ describe 'UPDATE_WAL_COOKIE_SCRIPT' do
+ subject do
+ with_redis do |redis|
+ redis.eval(described_class::UPDATE_WAL_COOKIE_SCRIPT, keys: [cookie_key], argv: argv)
end
+ end
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ let(:argv) { ['c1', 1, 'loc1', 'c2', 2, 'loc2', 'c3', 3, 'loc3'] }
- context "when the new offset is bigger then the existing one" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
- end
+ it 'does not create the key' do
+ subject
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A3', '3'],
- ci: ['AB/12342', '2']
- }
- end
+ expect(with_redis { |r| r.get(cookie_key) }).to eq(nil)
+ end
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ context 'when the key exists' do
+ let(:existing_cookie) { { 'offsets' => {}, 'wal_locations' => {} } }
+ let(:expected_ttl) { 123 }
- it 'updates a wal location to redis with an offset' do
- expect { duplicate_job.update_latest_wal_location! }
- .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
- .to(new_wal_location_with_offset[:main])
- .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
- .to(new_wal_location_with_offset[:ci])
- end
+ before do
+ with_redis { |r| r.set(cookie_key, existing_cookie.to_msgpack, ex: expected_ttl) }
end
- context "when the old offset is not bigger then the existing one" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
- end
+ it 'updates all connections' do
+ subject
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ expect(cookie['wal_locations']).to eq({ 'c1' => 'loc1', 'c2' => 'loc2', 'c3' => 'loc3' })
+ expect(cookie['offsets']).to eq({ 'c1' => 1, 'c2' => 2, 'c3' => 3 })
+ end
+
+ it 'preserves the ttl' do
+ subject
- let(:new_wal_location_with_offset) do
+ expect(redis_ttl(cookie_key)).to be_within(1).of(expected_ttl)
+ end
+
+ context 'and low offsets' do
+ let(:existing_cookie) do
{
- # offset is relative to `existing_wal`
- main: ['0/D525E3A2', '2'],
- ci: ['AB/12342', '2']
+ 'offsets' => { 'c1' => 0, 'c2' => 2 },
+ 'wal_locations' => { 'c1' => 'loc1old', 'c2' => 'loc2old' }
}
end
- it "does not update a wal location to redis with an offset" do
- expect { duplicate_job.update_latest_wal_location! }
- .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
- .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
+ it 'updates only some connections' do
+ subject
+
+ expect(cookie['wal_locations']).to eq({ 'c1' => 'loc1', 'c2' => 'loc2old', 'c3' => 'loc3' })
+ expect(cookie['offsets']).to eq({ 'c1' => 1, 'c2' => 2, 'c3' => 3 })
end
end
end
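
The new UPDATE_WAL_COOKIE_SCRIPT examples drive the class's Lua script through redis.eval with one key and a flat argv of (connection, offset, wal_location) triples. A small sketch of that call shape, using a stand-in script because the real one is not part of this diff:

    require 'redis'

    DEMO_SCRIPT = <<~LUA
      -- return the number of (connection, offset, location) triples passed in ARGV
      return #ARGV / 3
    LUA

    redis = Redis.new
    argv  = ['c1', 1, 'loc1', 'c2', 2, 'loc2']
    redis.eval(DEMO_SCRIPT, keys: ['some:cookie:key'], argv: argv) # => 2
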
@@ -283,11 +242,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
describe '#latest_wal_locations' do
context 'when job was deduplicated and wal locations were already persisted' do
before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
+ cookie = { 'wal_locations' => { 'main' => 'abc', 'ci' => 'def' } }.to_msgpack
+ set_idempotency_key(cookie_key, cookie)
end
- it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
+ it { expect(duplicate_job.latest_wal_locations).to eq({ 'main' => 'abc', 'ci' => 'def' }) }
end
context 'when job is not deduplication and wal locations were not persisted' do
@@ -302,60 +261,22 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'when the key exists in redis' do
before do
- set_idempotency_key(idempotency_key, 'existing-jid')
- set_idempotency_key(deduplicated_flag_key, 1)
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
- set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
- end
+ set_idempotency_key(cookie_key, "garbage")
end
shared_examples 'deleting the duplicate job' do
shared_examples 'deleting keys from redis' do |key_name|
it "removes the #{key_name} from redis" do
expect { duplicate_job.delete! }
- .to change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
- .to([nil, -2])
+ .to change { with_redis { |r| r.get(key) } }
+ .from(from_value)
+ .to(nil)
end
end
- shared_examples 'does not delete key from redis' do |key_name|
- it "does not remove the #{key_name} from redis" do
- expect { duplicate_job.delete! }
- .to not_change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
- end
- end
-
- it_behaves_like 'deleting keys from redis', 'idempotent key' do
- let(:key) { idempotency_key }
- let(:from_value) { 'existing-jid' }
- end
-
- it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
- let(:key) { deduplicated_flag_key }
- let(:from_value) { '1' }
- end
-
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
- let(:key) { existing_wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
- end
-
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
- let(:key) { existing_wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
- end
-
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
- let(:key) { wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
- end
-
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
- let(:key) { wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
+ it_behaves_like 'deleting keys from redis', 'cookie key' do
+ let(:key) { cookie_key }
+ let(:from_value) { "garbage" }
end
end
@@ -387,15 +308,14 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
describe '#set_deduplicated_flag!' do
context 'when the job is reschedulable' do
before do
+ duplicate_job.check! # ensure cookie exists
allow(duplicate_job).to receive(:reschedulable?) { true }
end
it 'sets the key in Redis' do
duplicate_job.set_deduplicated_flag!
- flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
+ expect(cookie['deduplicated']).to eq('1')
end
it 'sets, gets and cleans up the deduplicated flag' do
@@ -415,11 +335,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
it 'does not set the key in Redis' do
+ duplicate_job.check!
duplicate_job.set_deduplicated_flag!
- flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to be_nil
+ expect(cookie['deduplicated']).to eq(nil)
end
it 'does not set the deduplicated flag' do
@@ -445,43 +364,24 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
expect(duplicate_job.duplicate?).to be(false)
end
- it 'returns false if the existing jid is different from the job jid' do
- set_idempotency_key(idempotency_key, 'a different jid')
+ it 'returns true if the existing jid is different from the job jid' do
+ set_idempotency_key(cookie_key, { 'jid' => 'a different jid' }.to_msgpack)
duplicate_job.check!
expect(duplicate_job.duplicate?).to be(true)
end
end
- def existing_wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:existing_wal_location"
- end
-
- def wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:wal_location"
- end
-
- def set_idempotency_key(key, value = '1')
+ def set_idempotency_key(key, value)
with_redis { |r| r.set(key, value) }
end
- def rpush_to_redis_key(key, wal, offset)
- with_redis { |r| r.rpush(key, [wal, offset]) }
- end
-
- def read_idempotency_key_with_ttl(key)
- with_redis do |redis|
- redis.pipelined do |p|
- p.get(key)
- p.ttl(key)
- end
- end
+ def get_redis_msgpack(key)
+ MessagePack.unpack(with_redis { |redis| redis.get(key) })
end
- def read_range_from_redis(key)
- with_redis do |redis|
- redis.lrange(key, 0, -1)
- end
+ def redis_ttl(key)
+ with_redis { |redis| redis.ttl(key) }
end
end
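
The rewritten helpers treat the deduplication state as a single "cookie": one Redis key holding a MessagePack-encoded hash with its own TTL. A self-contained sketch of that round trip (the key name below is illustrative only):

    require 'redis'
    require 'msgpack'

    redis      = Redis.new
    cookie_key = 'resque:gitlab:duplicate:default:abc123:cookie:v2' # illustrative key

    cookie = { 'jid' => '123', 'offsets' => {}, 'wal_locations' => {}, 'existing_wal_locations' => {} }
    redis.set(cookie_key, cookie.to_msgpack, ex: 600)

    MessagePack.unpack(redis.get(cookie_key)) # => the cookie hash
    redis.ttl(cookie_key)                     # => roughly 600
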
@@ -497,7 +397,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
duplicate_job.check!
end
- it_behaves_like 'tracking duplicates in redis'
+ it_behaves_like 'with Redis cookies'
end
context 'when both multi-store feature flags are off' do
@@ -517,7 +417,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
duplicate_job.check!
end
- it_behaves_like 'tracking duplicates in redis'
+ it_behaves_like 'with Redis cookies'
end
describe '#scheduled?' do
@@ -562,6 +462,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'with deduplicated flag' do
before do
+ duplicate_job.check! # ensure cookie exists
duplicate_job.set_deduplicated_flag!
end
@@ -578,6 +479,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'with deduplicated flag' do
before do
+ duplicate_job.check! # ensure cookie exists
duplicate_job.set_deduplicated_flag!
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 54a1723afbc..1a53a9b8701 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
describe '.initialize_process_metrics' do
it 'sets concurrency metrics' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
described_class.initialize_process_metrics
end
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
it 'sets the concurrency metric' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
described_class.initialize_process_metrics
end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 14dbeac37e8..af9075f5aa0 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -6,7 +6,7 @@ require 'sidekiq/testing'
RSpec.describe Gitlab::SidekiqMiddleware do
let(:job_args) { [0.01] }
let(:disabled_sidekiq_middlewares) { [] }
- let(:chain) { Sidekiq::Middleware::Chain.new }
+ let(:chain) { Sidekiq::Middleware::Chain.new(Sidekiq) }
let(:queue) { 'test' }
let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares }
let(:worker_class) do
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index d4391d3023a..9ed2a0642fc 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -16,34 +16,42 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
clear_queues
end
- describe '#execute', :aggregate_failures do
+ describe '#migrate_set', :aggregate_failures do
shared_examples 'processing a set' do
- let(:migrator) { described_class.new(set_name) }
+ let(:migrator) { described_class.new(mappings) }
let(:set_after) do
Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
- .map { |item, score| [Sidekiq.load_json(item), score] }
+ .map { |item, score| [Gitlab::Json.load(item), score] }
end
context 'when the set is empty' do
+ let(:mappings) { { 'AuthorizedProjectsWorker' => 'new_queue' } }
+
it 'returns the number of scanned and migrated jobs' do
- expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue')).to eq(scanned: 0, migrated: 0)
+ expect(migrator.migrate_set(set_name)).to eq(
+ scanned: 0,
+ migrated: 0)
end
end
context 'when the set is not empty' do
+ let(:mappings) { {} }
+
it 'returns the number of scanned and migrated jobs' do
create_jobs
- expect(migrator.execute({})).to eq(scanned: 4, migrated: 0)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 0)
end
end
context 'when there are no matching jobs' do
+ let(:mappings) { { 'PostReceive' => 'new_queue' } }
+
it 'does not change any queue names' do
create_jobs(include_post_receive: false)
- expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 3, migrated: 0)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 3, migrated: 0)
expect(set_after.length).to eq(3)
expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects',
@@ -53,10 +61,13 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
context 'when there are matching jobs' do
it 'migrates only the workers matching the given worker from the set' do
+ migrator = described_class.new({ 'AuthorizedProjectsWorker' => 'new_queue' })
freeze_time do
create_jobs
- expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue')).to eq(scanned: 4, migrated: 3)
+ expect(migrator.migrate_set(set_name)).to eq(
+ scanned: 4,
+ migrated: 3)
set_after.each.with_index do |(item, score), i|
if item['class'] == 'AuthorizedProjectsWorker'
@@ -71,11 +82,14 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
end
it 'allows migrating multiple workers at once' do
+ migrator = described_class.new({
+ 'AuthorizedProjectsWorker' => 'new_queue',
+ 'PostReceive' => 'another_queue'
+ })
freeze_time do
create_jobs
- expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'another_queue'))
- .to eq(scanned: 4, migrated: 4)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 4)
set_after.each.with_index do |(item, score), i|
if item['class'] == 'AuthorizedProjectsWorker'
@@ -90,11 +104,14 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
end
it 'allows migrating multiple workers to the same queue' do
+ migrator = described_class.new({
+ 'AuthorizedProjectsWorker' => 'new_queue',
+ 'PostReceive' => 'new_queue'
+ })
freeze_time do
create_jobs
- expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'new_queue'))
- .to eq(scanned: 4, migrated: 4)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 4)
set_after.each.with_index do |(item, score), i|
expect(item).to include('queue' => 'new_queue', 'args' => [i])
@@ -104,16 +121,17 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
end
it 'does not try to migrate jobs that are removed from the set during the migration' do
+ migrator = described_class.new({ 'PostReceive' => 'new_queue' })
freeze_time do
create_jobs
- allow(migrator).to receive(:migrate_job).and_wrap_original do |meth, *args|
- Sidekiq.redis { |c| c.zrem(set_name, args.first) }
+ allow(migrator).to receive(:migrate_job_in_set).and_wrap_original do |meth, *args|
+ Sidekiq.redis { |c| c.zrem(set_name, args.second) }
meth.call(*args)
end
- expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 0)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 0)
expect(set_after.length).to eq(3)
expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects'))
@@ -121,11 +139,12 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
end
it 'does not try to migrate unmatched jobs that are added to the set during the migration' do
+ migrator = described_class.new({ 'PostReceive' => 'new_queue' })
create_jobs
calls = 0
- allow(migrator).to receive(:migrate_job).and_wrap_original do |meth, *args|
+ allow(migrator).to receive(:migrate_job_in_set).and_wrap_original do |meth, *args|
if calls == 0
travel_to(5.hours.from_now) { create_jobs(include_post_receive: false) }
end
@@ -135,18 +154,19 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
meth.call(*args)
end
- expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 1)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 1)
expect(set_after.group_by { |job| job.first['queue'] }.transform_values(&:count))
.to eq('authorized_projects' => 6, 'new_queue' => 1)
end
it 'iterates through the entire set of jobs' do
+ migrator = described_class.new({ 'NonExistentWorker' => 'new_queue' })
50.times do |i|
travel_to(i.hours.from_now) { create_jobs }
end
- expect(migrator.execute('NonExistentWorker' => 'new_queue')).to eq(scanned: 200, migrated: 0)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 200, migrated: 0)
expect(set_after.length).to eq(200)
end
@@ -158,14 +178,16 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
stub_const("#{described_class}::LOG_FREQUENCY", 2)
logger = Logger.new(StringIO.new)
- migrator = described_class.new(set_name, logger: logger)
+ migrator = described_class.new({
+ 'AuthorizedProjectsWorker' => 'new_queue',
+ 'PostReceive' => 'another_queue'
+ }, logger: logger)
expect(logger).to receive(:info).with(a_string_matching('Processing')).once.ordered
expect(logger).to receive(:info).with(a_string_matching('In progress')).once.ordered
expect(logger).to receive(:info).with(a_string_matching('Done')).once.ordered
- expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'new_queue'))
- .to eq(scanned: 4, migrated: 4)
+ expect(migrator.migrate_set(set_name)).to eq(scanned: 4, migrated: 4)
end
end
end
@@ -186,25 +208,6 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
end
context 'retried jobs' do
- let(:set_name) { 'retry' }
- # Account for Sidekiq retry jitter
- # https://github.com/mperham/sidekiq/blob/3575ccb44c688dd08bfbfd937696260b12c622fb/lib/sidekiq/job_retry.rb#L217
- let(:schedule_jitter) { 10 }
-
- # Try to mimic as closely as possible what Sidekiq will actually
- # do to retry a job.
- def retry_in(klass, time, args)
- message = { 'class' => klass.name, 'args' => [args], 'retry' => true }.to_json
-
- allow(klass).to receive(:sidekiq_retry_in_block).and_return(proc { time })
-
- begin
- Sidekiq::JobRetry.new.local(klass, message, klass.queue) { raise 'boom' }
- rescue Sidekiq::JobRetry::Skip
- # Sidekiq scheduled the retry
- end
- end
-
def create_jobs(include_post_receive: true)
retry_in(AuthorizedProjectsWorker, 1.hour, 0)
retry_in(AuthorizedProjectsWorker, 2.hours, 1)
@@ -212,7 +215,248 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
retry_in(AuthorizedProjectsWorker, 4.hours, 3)
end
+ include_context 'when handling retried jobs'
it_behaves_like 'processing a set'
end
end
+
+ describe '#migrate_queues', :aggregate_failures do
+ let(:migrator) { described_class.new(mappings, logger: logger) }
+ let(:logger) { nil }
+
+ def list_queues
+ queues = Sidekiq.redis do |conn|
+ conn.scan_each(match: "queue:*").to_a
+ end
+ queues.uniq.map { |queue| queue.split(':', 2).last }
+ end
+
+ def list_jobs(queue_name)
+ Sidekiq.redis { |conn| conn.lrange("queue:#{queue_name}", 0, -1) }
+ .map { |item| Gitlab::Json.load(item) }
+ end
+
+ def pre_migrate_checks; end
+
+ before do
+ queue_name_from_worker_name = Gitlab::SidekiqConfig::WorkerRouter.method(:queue_name_from_worker_name)
+ EmailReceiverWorker.sidekiq_options(queue: queue_name_from_worker_name.call(EmailReceiverWorker))
+ EmailReceiverWorker.perform_async('foo')
+ EmailReceiverWorker.perform_async('bar')
+
+ # test worker that has ':' inside the queue name
+ AuthorizedProjectUpdate::ProjectRecalculateWorker.sidekiq_options(
+ queue: queue_name_from_worker_name.call(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ )
+ AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async
+ end
+
+ after do
+ # resets the queue name to its original
+ EmailReceiverWorker.set_queue
+ AuthorizedProjectUpdate::ProjectRecalculateWorker.set_queue
+ end
+
+ shared_examples 'migrating queues' do
+ it 'migrates the jobs to the correct destination queue' do
+ queues = list_queues
+ expect(queues).to include(*queues_included_pre_migrate)
+ expect(queues).not_to include(*queues_excluded_pre_migrate)
+ pre_migrate_checks
+
+ migrator.migrate_queues
+
+ queues = list_queues
+ expect(queues).not_to include(*queues_excluded_post_migrate)
+ expect(queues).to include(*queues_included_post_migrate)
+ post_migrate_checks
+ end
+ end
+
+ context 'with all workers mapped to default queue' do
+ let(:mappings) do
+ { 'EmailReceiverWorker' => 'default', 'AuthorizedProjectUpdate::ProjectRecalculateWorker' => 'default' }
+ end
+
+ let(:queues_included_pre_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_excluded_pre_migrate) { ['default'] }
+ let(:queues_excluded_post_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_included_post_migrate) { ['default'] }
+
+ def post_migrate_checks
+ jobs = list_jobs('default')
+ expect(jobs.length).to eq(3)
+ sorted = jobs.sort_by { |job| [job["class"], job["args"]] }
+ expect(sorted[0]).to include('class' => 'AuthorizedProjectUpdate::ProjectRecalculateWorker',
+ 'queue' => 'default')
+ expect(sorted[1]).to include('class' => 'EmailReceiverWorker', 'args' => ['bar'], 'queue' => 'default')
+ expect(sorted[2]).to include('class' => 'EmailReceiverWorker', 'args' => ['foo'], 'queue' => 'default')
+ end
+
+ it_behaves_like 'migrating queues'
+ end
+
+ context 'with custom mapping to different queues' do
+ let(:mappings) do
+ { 'EmailReceiverWorker' => 'new_email',
+ 'AuthorizedProjectUpdate::ProjectRecalculateWorker' => 'new_authorized' }
+ end
+
+ let(:queues_included_pre_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_excluded_pre_migrate) { %w[new_email new_authorized] }
+ let(:queues_excluded_post_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_included_post_migrate) { %w[new_email new_authorized] }
+
+ def post_migrate_checks
+ email_jobs = list_jobs('new_email')
+ expect(email_jobs.length).to eq(2)
+ expect(email_jobs[0]).to include('class' => 'EmailReceiverWorker', 'args' => ['bar'], 'queue' => 'new_email')
+ expect(email_jobs[1]).to include('class' => 'EmailReceiverWorker', 'args' => ['foo'], 'queue' => 'new_email')
+
+ export_jobs = list_jobs('new_authorized')
+ expect(export_jobs.length).to eq(1)
+ expect(export_jobs[0]).to include('class' => 'AuthorizedProjectUpdate::ProjectRecalculateWorker',
+ 'queue' => 'new_authorized')
+ end
+
+ it_behaves_like 'migrating queues'
+ end
+
+ context 'with illegal JSON payload' do
+ let(:job) { '{foo: 1}' }
+ let(:mappings) do
+ { 'EmailReceiverWorker' => 'default', 'AuthorizedProjectUpdate::ProjectRecalculateWorker' => 'default' }
+ end
+
+ let(:queues_included_pre_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_excluded_pre_migrate) { ['default'] }
+ let(:queues_excluded_post_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_included_post_migrate) { ['default'] }
+ let(:logger) { Logger.new(StringIO.new) }
+
+ before do
+ Sidekiq.redis do |conn|
+ conn.lpush("queue:email_receiver", job)
+ end
+ end
+
+ def pre_migrate_checks
+ expect(logger).to receive(:error)
+ .with(a_string_matching('Unmarshal JSON payload from SidekiqMigrateJobs failed'))
+ .once
+ end
+
+ def post_migrate_checks
+ jobs = list_jobs('default')
+ expect(jobs.length).to eq(3)
+ sorted = jobs.sort_by { |job| [job["class"], job["args"]] }
+ expect(sorted[0]).to include('class' => 'AuthorizedProjectUpdate::ProjectRecalculateWorker',
+ 'queue' => 'default')
+ expect(sorted[1]).to include('class' => 'EmailReceiverWorker', 'args' => ['bar'], 'queue' => 'default')
+ expect(sorted[2]).to include('class' => 'EmailReceiverWorker', 'args' => ['foo'], 'queue' => 'default')
+ end
+
+ it_behaves_like 'migrating queues'
+ end
+
+ context 'when multiple workers are in the same queue' do
+ before do
+ ExportCsvWorker.sidekiq_options(queue: 'email_receiver') # follows EmailReceiverWorker's queue
+ ExportCsvWorker.perform_async('fizz')
+ end
+
+ after do
+ ExportCsvWorker.set_queue
+ end
+
+ context 'when the queue exists in mappings' do
+ let(:mappings) do
+ { 'EmailReceiverWorker' => 'email_receiver', 'AuthorizedProjectUpdate::ProjectRecalculateWorker' => 'default',
+ 'ExportCsvWorker' => 'default' }
+ end
+
+ let(:queues_included_pre_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_excluded_pre_migrate) { ['default'] }
+ let(:queues_excluded_post_migrate) do
+ ['authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_included_post_migrate) { %w[default email_receiver] }
+
+ it_behaves_like 'migrating queues'
+ def post_migrate_checks
+ # jobs from email_receiver are not migrated at all
+ jobs = list_jobs('email_receiver')
+ expect(jobs.length).to eq(3)
+ sorted = jobs.sort_by { |job| [job["class"], job["args"]] }
+ expect(sorted[0]).to include('class' => 'EmailReceiverWorker', 'args' => ['bar'], 'queue' => 'email_receiver')
+ expect(sorted[1]).to include('class' => 'EmailReceiverWorker', 'args' => ['foo'], 'queue' => 'email_receiver')
+ expect(sorted[2]).to include('class' => 'ExportCsvWorker', 'args' => ['fizz'], 'queue' => 'email_receiver')
+ end
+ end
+
+ context 'when the queue does not exist in mappings' do
+ let(:mappings) do
+ { 'EmailReceiverWorker' => 'default', 'AuthorizedProjectUpdate::ProjectRecalculateWorker' => 'default',
+ 'ExportCsvWorker' => 'default' }
+ end
+
+ let(:queues_included_pre_migrate) do
+ ['email_receiver',
+ 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_excluded_pre_migrate) { ['default'] }
+ let(:queues_excluded_post_migrate) do
+ ['email_receiver', 'authorized_project_update:authorized_project_update_project_recalculate']
+ end
+
+ let(:queues_included_post_migrate) { ['default'] }
+
+ it_behaves_like 'migrating queues'
+ def post_migrate_checks
+ # jobs from email_receiver are all migrated
+ jobs = list_jobs('email_receiver')
+ expect(jobs.length).to eq(0)
+
+ jobs = list_jobs('default')
+ expect(jobs.length).to eq(4)
+ sorted = jobs.sort_by { |job| [job["class"], job["args"]] }
+ expect(sorted[0]).to include('class' => 'AuthorizedProjectUpdate::ProjectRecalculateWorker',
+ 'queue' => 'default')
+ expect(sorted[1]).to include('class' => 'EmailReceiverWorker', 'args' => ['bar'], 'queue' => 'default')
+ expect(sorted[2]).to include('class' => 'EmailReceiverWorker', 'args' => ['foo'], 'queue' => 'default')
+ expect(sorted[3]).to include('class' => 'ExportCsvWorker', 'args' => ['fizz'], 'queue' => 'default')
+ end
+ end
+ end
+ end
end
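
The refactor exercised above moves the worker-to-queue mappings into the constructor and splits set migration (#migrate_set) from queue migration (#migrate_queues). A hedged usage sketch; the 'schedule' set name is an assumption (the spec itself uses 'retry'):

    mappings = { 'AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'default' }
    migrator = Gitlab::SidekiqMigrateJobs.new(mappings, logger: Logger.new($stdout))

    migrator.migrate_set('retry')    # => { scanned: ..., migrated: ... }
    migrator.migrate_set('schedule') # assumed standard Sidekiq set name
    migrator.migrate_queues          # rewrites queued jobs into their mapped queues
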
diff --git a/spec/lib/gitlab/slash_commands/application_help_spec.rb b/spec/lib/gitlab/slash_commands/application_help_spec.rb
index b82121bf3a8..b182c0e5cc6 100644
--- a/spec/lib/gitlab/slash_commands/application_help_spec.rb
+++ b/spec/lib/gitlab/slash_commands/application_help_spec.rb
@@ -4,11 +4,13 @@ require 'spec_helper'
RSpec.describe Gitlab::SlashCommands::ApplicationHelp do
let(:params) { { command: '/gitlab', text: 'help' } }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:chat_user) { create(:chat_name, user: user) }
let(:project) { build(:project) }
describe '#execute' do
subject do
- described_class.new(project, params).execute
+ described_class.new(project, chat_user, params).execute
end
it 'displays the help section' do
@@ -16,5 +18,23 @@ RSpec.describe Gitlab::SlashCommands::ApplicationHelp do
expect(subject[:text]).to include('Available commands')
expect(subject[:text]).to include('/gitlab [project name or alias] issue show')
end
+
+ context 'with incident declare command' do
+ context 'when feature flag is enabled' do
+ it 'displays the declare command' do
+ expect(subject[:text]).to include('/gitlab incident declare')
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(incident_declare_slash_command: false)
+ end
+
+ it 'does not display the declare command' do
+ expect(subject[:text]).not_to include('/gitlab incident declare')
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/slash_commands/command_spec.rb b/spec/lib/gitlab/slash_commands/command_spec.rb
index 069577b3846..f4664bcfef9 100644
--- a/spec/lib/gitlab/slash_commands/command_spec.rb
+++ b/spec/lib/gitlab/slash_commands/command_spec.rb
@@ -122,5 +122,25 @@ RSpec.describe Gitlab::SlashCommands::Command do
it { is_expected.to eq(Gitlab::SlashCommands::IssueComment) }
end
+
+ context 'when incident declare is triggered' do
+ context 'IncidentNew is triggered' do
+ let(:params) { { text: 'incident declare' } }
+
+ it { is_expected.to eq(Gitlab::SlashCommands::IncidentManagement::IncidentNew) }
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(incident_declare_slash_command: false)
+ end
+
+ context 'IncidentNew is triggered' do
+ let(:params) { { text: 'incident declare' } }
+
+ it { is_expected.not_to eq(Gitlab::SlashCommands::IncidentManagement::IncidentNew) }
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/slash_commands/presenters/incident_management/incident_new_spec.rb b/spec/lib/gitlab/slash_commands/presenters/incident_management/incident_new_spec.rb
new file mode 100644
index 00000000000..cbc584b931f
--- /dev/null
+++ b/spec/lib/gitlab/slash_commands/presenters/incident_management/incident_new_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SlashCommands::Presenters::IncidentManagement::IncidentNew do
+ subject { described_class.new }
+
+ it 'returns the ephemeral message' do
+ message = subject.present('It works!')
+
+ expect(message).to be_a(Hash)
+ expect(message[:text]).to eq('It works!')
+ expect(message[:response_type]).to be(:ephemeral)
+ end
+end
diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb
index 9bf6f0b82bc..60bb006673f 100644
--- a/spec/lib/gitlab/sql/pattern_spec.rb
+++ b/spec/lib/gitlab/sql/pattern_spec.rb
@@ -29,6 +29,9 @@ RSpec.describe Gitlab::SQL::Pattern do
'AH' | %i[title description] | %i[issue3]
'oh' | %i[title] | %i[issue3]
'ah' | %i[description] | %i[issue3]
+
+ '' | %i[title] | %i[issue1 issue2 issue3]
+ %w[a b] | %i[title] | %i[issue1 issue2 issue3]
end
with_them do
@@ -104,14 +107,14 @@ RSpec.describe Gitlab::SQL::Pattern do
end
end
- describe '.select_fuzzy_words' do
- subject(:select_fuzzy_words) { Issue.select_fuzzy_words(query) }
+ describe '.select_fuzzy_terms' do
+ subject(:select_fuzzy_terms) { Issue.select_fuzzy_terms(query) }
context 'with a word equal to 3 chars' do
let(:query) { 'foo' }
- it 'returns array cotaining a word' do
- expect(select_fuzzy_words).to match_array(['foo'])
+ it 'returns array containing a word' do
+ expect(select_fuzzy_terms).to match_array(['foo'])
end
end
@@ -119,7 +122,7 @@ RSpec.describe Gitlab::SQL::Pattern do
let(:query) { 'fo' }
it 'returns empty array' do
- expect(select_fuzzy_words).to match_array([])
+ expect(select_fuzzy_terms).to match_array([])
end
end
@@ -127,7 +130,7 @@ RSpec.describe Gitlab::SQL::Pattern do
let(:query) { 'foo baz' }
it 'returns array containing two words' do
- expect(select_fuzzy_words).to match_array(%w[foo baz])
+ expect(select_fuzzy_terms).to match_array(%w[foo baz])
end
end
@@ -135,7 +138,7 @@ RSpec.describe Gitlab::SQL::Pattern do
let(:query) { 'foo baz' }
it 'returns array containing two words' do
- expect(select_fuzzy_words).to match_array(%w[foo baz])
+ expect(select_fuzzy_terms).to match_array(%w[foo baz])
end
end
@@ -143,7 +146,19 @@ RSpec.describe Gitlab::SQL::Pattern do
let(:query) { 'foo ba' }
it 'returns array containing a word' do
- expect(select_fuzzy_words).to match_array(['foo'])
+ expect(select_fuzzy_terms).to match_array(['foo'])
+ end
+ end
+ end
+
+ describe '.split_query_to_search_terms' do
+ subject(:split_query_to_search_terms) { described_class.split_query_to_search_terms(query) }
+
+ context 'with words separated by spaces' do
+ let(:query) { 'really bar baz' }
+
+ it 'returns array containing individual words' do
+ expect(split_query_to_search_terms).to match_array(%w[really bar baz])
end
end
@@ -151,15 +166,15 @@ RSpec.describe Gitlab::SQL::Pattern do
let(:query) { '"really bar"' }
it 'returns array containing a multi-word' do
- expect(select_fuzzy_words).to match_array(['really bar'])
+ expect(split_query_to_search_terms).to match_array(['really bar'])
end
end
context 'with a multi-word surrounded by double quote and two words' do
let(:query) { 'foo "really bar" baz' }
- it 'returns array containing a multi-word and tow words' do
- expect(select_fuzzy_words).to match_array(['foo', 'really bar', 'baz'])
+ it 'returns array containing a multi-word and two words' do
+ expect(split_query_to_search_terms).to match_array(['foo', 'really bar', 'baz'])
end
end
@@ -167,7 +182,7 @@ RSpec.describe Gitlab::SQL::Pattern do
let(:query) { 'foo"really bar"' }
it 'returns array containing two words with double quote' do
- expect(select_fuzzy_words).to match_array(['foo"really', 'bar"'])
+ expect(split_query_to_search_terms).to match_array(['foo"really', 'bar"'])
end
end
@@ -175,15 +190,15 @@ RSpec.describe Gitlab::SQL::Pattern do
let(:query) { '"really bar"baz' }
it 'returns array containing two words with double quote' do
- expect(select_fuzzy_words).to match_array(['"really', 'bar"baz'])
+ expect(split_query_to_search_terms).to match_array(['"really', 'bar"baz'])
end
end
context 'with two multi-word surrounded by double quote and two words' do
let(:query) { 'foo "really bar" baz "awesome feature"' }
- it 'returns array containing two multi-words and tow words' do
- expect(select_fuzzy_words).to match_array(['foo', 'really bar', 'baz', 'awesome feature'])
+ it 'returns array containing two multi-words and two words' do
+ expect(split_query_to_search_terms).to match_array(['foo', 'really bar', 'baz', 'awesome feature'])
end
end
end
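
The renamed helper splits a search query into quoted phrases and single words before the fuzzy matching step. A usage sketch that only restates what the examples above assert:

    Gitlab::SQL::Pattern.split_query_to_search_terms('foo "really bar" baz')
    # => ["foo", "really bar", "baz"]
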
diff --git a/spec/lib/gitlab/tracking/helpers/weak_password_error_event_spec.rb b/spec/lib/gitlab/tracking/helpers/weak_password_error_event_spec.rb
new file mode 100644
index 00000000000..3df10f79e98
--- /dev/null
+++ b/spec/lib/gitlab/tracking/helpers/weak_password_error_event_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::Helpers::WeakPasswordErrorEvent do
+ let(:user) { build(:user) }
+
+ subject(:helper) { Class.new.include(described_class).new }
+
+ context "when user has a weak password error" do
+ before do
+ user.password = "password"
+ user.valid?
+ end
+
+ it "tracks the event" do
+ helper.track_weak_password_error(user, 'A', 'B')
+ expect_snowplow_event(
+ category: 'Gitlab::Tracking::Helpers::WeakPasswordErrorEvent',
+ action: 'track_weak_password_error',
+ controller: 'A',
+ method: 'B'
+ )
+ end
+ end
+
+ context "when user does not have a weak password error" do
+ before do
+ user.password = "short"
+ user.valid?
+ end
+
+ it "does not track the event" do
+ helper.track_weak_password_error(user, 'A', 'B')
+ expect_no_snowplow_event
+ end
+ end
+
+ context "when user does not have any errors" do
+ it "does not track the event" do
+ helper.track_weak_password_error(user, 'A', 'B')
+ expect_no_snowplow_event
+ end
+ end
+end
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index d4f96f1a37f..2e9a444bd24 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -22,8 +22,8 @@ RSpec.describe Gitlab::UrlBuilder do
:group_board | ->(board) { "/groups/#{board.group.full_path}/-/boards/#{board.id}" }
:commit | ->(commit) { "/#{commit.project.full_path}/-/commit/#{commit.id}" }
:issue | ->(issue) { "/#{issue.project.full_path}/-/issues/#{issue.iid}" }
- [:issue, :task] | ->(issue) { "/#{issue.project.full_path}/-/work_items/#{issue.id}" }
- :work_item | ->(work_item) { "/#{work_item.project.full_path}/-/work_items/#{work_item.id}" }
+ [:issue, :task] | ->(issue) { "/#{issue.project.full_path}/-/work_items/#{issue.iid}?iid_path=true" }
+ :work_item | ->(work_item) { "/#{work_item.project.full_path}/-/work_items/#{work_item.iid}?iid_path=true" }
:merge_request | ->(merge_request) { "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}" }
:project_milestone | ->(milestone) { "/#{milestone.project.full_path}/-/milestones/#{milestone.iid}" }
:project_snippet | ->(snippet) { "/#{snippet.project.full_path}/-/snippets/#{snippet.id}" }
@@ -56,6 +56,7 @@ RSpec.describe Gitlab::UrlBuilder do
:discussion_note_on_project_snippet | ->(note) { "/#{note.project.full_path}/-/snippets/#{note.noteable_id}#note_#{note.id}" }
:discussion_note_on_personal_snippet | ->(note) { "/-/snippets/#{note.noteable_id}#note_#{note.id}" }
:note_on_personal_snippet | ->(note) { "/-/snippets/#{note.noteable_id}#note_#{note.id}" }
+ :package | ->(package) { "/#{package.project.full_path}/-/packages/#{package.id}" }
end
with_them do
@@ -71,18 +72,6 @@ RSpec.describe Gitlab::UrlBuilder do
end
end
- context 'when work_items feature flag is disabled' do
- before do
- stub_feature_flags(work_items: false)
- end
-
- it 'returns an issue path for an issue of type task' do
- task = create(:issue, :task)
-
- expect(subject.build(task, only_path: true)).to eq("/#{task.project.full_path}/-/issues/#{task.iid}")
- end
- end
-
context 'when passing a compare' do
# NOTE: The Compare requires an actual repository, which isn't available
# with the `build_stubbed` strategy used by the table tests above
@@ -196,6 +185,18 @@ RSpec.describe Gitlab::UrlBuilder do
end
end
+ context 'when passing Packages::Package' do
+ let(:package) { build_stubbed(:terraform_module_package) }
+
+ context 'with infrastructure package' do
+ it 'returns the url for infrastructure registry' do
+ url = subject.build(package)
+
+ expect(url).to eq "#{Gitlab.config.gitlab.url}/#{package.project.full_path}/-/infrastructure_registry/#{package.id}"
+ end
+ end
+ end
+
context 'when passing a DesignManagement::Design' do
let(:design) { build_stubbed(:design) }
@@ -226,5 +227,27 @@ RSpec.describe Gitlab::UrlBuilder do
expect(subject.build(object, only_path: true)).to eq("/#{project.full_path}")
end
end
+
+ context 'when use_iid_in_work_items_path feature flag is disabled' do
+ before do
+ stub_feature_flags(use_iid_in_work_items_path: false)
+ end
+
+ context 'when a task issue is passed' do
+ it 'returns a path using the work item\'s ID and no query params' do
+ task = create(:issue, :task)
+
+ expect(subject.build(task, only_path: true)).to eq("/#{task.project.full_path}/-/work_items/#{task.id}")
+ end
+ end
+
+ context 'when a work item is passed' do
+ it 'returns a path using the work item\'s ID and no query params' do
+ work_item = create(:work_item)
+
+ expect(subject.build(work_item, only_path: true)).to eq("/#{work_item.project.full_path}/-/work_items/#{work_item.id}")
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index a1bddcb3a47..931340947a2 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -74,13 +74,12 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
describe '#with_instrumentation_class' do
- let(:metric_status) { 'active' }
let(:all_definitions) do
metrics_definitions = [
- { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'data_available' },
- { key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'implemented' },
- { key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'deprecated' },
- { key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: metric_status },
+ { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', status: 'active' },
+ { key_path: 'metric2', instrumentation_class: 'RedisHLLMetric', status: 'broken' },
+ { key_path: 'metric3', instrumentation_class: 'RedisHLLMetric', status: 'active' },
+ { key_path: 'metric4', instrumentation_class: 'RedisHLLMetric', status: 'removed' },
{ key_path: 'metric5', status: 'active' },
{ key_path: 'metric_missing_status' }
]
@@ -92,7 +91,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
it 'includes definitions with instrumentation_class' do
- expect(described_class.with_instrumentation_class.count).to eq(4)
+ expect(described_class.with_instrumentation_class.count).to eq(3)
end
context 'with removed metric' do
@@ -135,8 +134,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:repair_issue_url | nil
:removed_by_url | 1
- :instrumentation_class | 'Metric_Class'
- :instrumentation_class | 'metricClass'
+ :performance_indicator_type | nil
+ :instrumentation_class | 'Metric_Class'
+ :instrumentation_class | 'metricClass'
end
with_them do
@@ -201,9 +201,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
using RSpec::Parameterized::TableSyntax
where(:status, :skip_validation?) do
- 'deprecated' | true
- 'removed' | true
'active' | false
+ 'broken' | false
+ 'removed' | true
end
with_them do
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
index 76eec2755df..1f00f7bbec3 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
@@ -3,403 +3,133 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redis_shared_state do
- let(:entity1) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
- let(:entity2) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
- let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
- let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
let(:end_date) { Date.current }
- let(:sources) { Gitlab::Usage::Metrics::Aggregates::Sources }
let(:namespace) { described_class.to_s.deconstantize.constantize }
+ let(:sources) { Gitlab::Usage::Metrics::Aggregates::Sources }
let_it_be(:recorded_at) { Time.current.to_i }
- def aggregated_metric(name:, time_frame:, source: "redis", events: %w[event1 event2 event3], operator: "OR", feature_flag: nil)
- {
- name: name,
- source: source,
- events: events,
- operator: operator,
- time_frame: time_frame,
- feature_flag: feature_flag
- }.compact.with_indifferent_access
- end
-
- context 'aggregated_metrics_data' do
- shared_examples 'aggregated_metrics_data' do
- context 'no aggregated metric is defined' do
- it 'returns empty hash' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics).and_return([])
- end
-
- expect(aggregated_metrics_data).to eq({})
- end
+ describe '.calculate_count_for_aggregation' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'with valid configuration' do
+ where(:number_of_days, :operator, :datasource, :expected_method) do
+ 28 | 'AND' | 'redis_hll' | :calculate_metrics_intersections
+ 7 | 'AND' | 'redis_hll' | :calculate_metrics_intersections
+ 28 | 'AND' | 'database' | :calculate_metrics_intersections
+ 7 | 'AND' | 'database' | :calculate_metrics_intersections
+ 28 | 'OR' | 'redis_hll' | :calculate_metrics_union
+ 7 | 'OR' | 'redis_hll' | :calculate_metrics_union
+ 28 | 'OR' | 'database' | :calculate_metrics_union
+ 7 | 'OR' | 'database' | :calculate_metrics_union
end
- context 'there are aggregated metrics defined' do
- let(:aggregated_metrics) do
- [
- aggregated_metric(name: "gmau_1", source: datasource, time_frame: time_frame, operator: operator)
- ]
- end
-
- let(:results) { { 'gmau_1' => 5 } }
+ with_them do
+ let(:time_frame) { "#{number_of_days}d" }
+ let(:start_date) { number_of_days.days.ago.to_date }
let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
-
- before do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
- end
- end
-
- context 'with OR operator' do
- let(:operator) { Gitlab::Usage::Metrics::Aggregates::UNION_OF_AGGREGATED_METRICS }
-
- it 'returns the number of unique events occurred for any metric in aggregate', :aggregate_failures do
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
- expect(aggregated_metrics_data).to eq(results)
- end
+ let(:aggregate) do
+ {
+ source: datasource,
+ operator: operator,
+ events: %w[event1 event2]
+ }
end
- context 'with AND operator' do
- let(:operator) { Gitlab::Usage::Metrics::Aggregates::INTERSECTION_OF_AGGREGATED_METRICS }
-
- it 'returns the number of unique events that occurred for all of metrics in the aggregate', :aggregate_failures do
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_intersections).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
- expect(aggregated_metrics_data).to eq(results)
- end
- end
-
- context 'hidden behind feature flag' do
- let(:enabled_feature_flag) { 'test_ff_enabled' }
- let(:disabled_feature_flag) { 'test_ff_disabled' }
- let(:aggregated_metrics) do
- params = { source: datasource, time_frame: time_frame }
- [
- # represents stable aggregated metrics that has been fully released
- aggregated_metric(**params.merge(name: "gmau_without_ff")),
- # represents new aggregated metric that is under performance testing on gitlab.com
- aggregated_metric(**params.merge(name: "gmau_enabled", feature_flag: enabled_feature_flag)),
- # represents aggregated metric that is under development and shouldn't be yet collected even on gitlab.com
- aggregated_metric(**params.merge(name: "gmau_disabled", feature_flag: disabled_feature_flag))
- ]
- end
-
- it 'does not calculate data for aggregates with ff turned off' do
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- stub_feature_flags(enabled_feature_flag => true, disabled_feature_flag => false)
- allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_return(6)
-
- expect(aggregated_metrics_data).to eq('gmau_without_ff' => 6, 'gmau_enabled' => 6)
- end
- end
- end
-
- context 'error handling' do
- context 'development and test environment' do
- it 'raises error when unknown aggregation operator is used' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', source: datasource, operator: "SUM", time_frame: time_frame)])
- end
-
- expect { aggregated_metrics_data }.to raise_error namespace::UnknownAggregationOperator
- end
-
- it 'raises error when unknown aggregation source is used' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', source: 'whoami', time_frame: time_frame)])
- end
-
- expect { aggregated_metrics_data }.to raise_error namespace::UnknownAggregationSource
- end
-
- it 'raises error when union is missing' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)])
- end
- allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
-
- expect { aggregated_metrics_data }.to raise_error sources::UnionNotAvailable
- end
+ subject(:calculate_count_for_aggregation) do
+ described_class
+ .new(recorded_at)
+ .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
- context 'production' do
- before do
- stub_rails_env('production')
- end
-
- it 'rescues unknown aggregation operator error' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', source: datasource, operator: "SUM", time_frame: time_frame)])
- end
-
- expect(aggregated_metrics_data).to eq('gmau_1' => -1)
- end
-
- it 'rescues unknown aggregation source error' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', source: 'whoami', time_frame: time_frame)])
- end
-
- expect(aggregated_metrics_data).to eq('gmau_1' => -1)
- end
-
- it 'rescues error when union is missing' do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)])
- end
- allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
-
- expect(aggregated_metrics_data).to eq('gmau_1' => -1)
- end
+ it 'returns the number of unique events for aggregation', :aggregate_failures do
+ expect(namespace::SOURCES[datasource])
+ .to receive(expected_method)
+ .with(params.merge(metric_names: %w[event1 event2]))
+ .and_return(5)
+ expect(calculate_count_for_aggregation).to eq(5)
end
end
end
- shared_examples 'database_sourced_aggregated_metrics' do
- let(:datasource) { namespace::DATABASE_SOURCE }
-
- it_behaves_like 'aggregated_metrics_data'
- end
-
- shared_examples 'redis_sourced_aggregated_metrics' do
- let(:datasource) { namespace::REDIS_SOURCE }
-
- it_behaves_like 'aggregated_metrics_data' do
- context 'error handling' do
- let(:aggregated_metrics) { [aggregated_metric(name: 'gmau_1', source: datasource, time_frame: time_frame)] }
- let(:error) { Gitlab::UsageDataCounters::HLLRedisCounter::EventError }
-
- before do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
- end
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_raise(error)
- end
-
- context 'development and test environment' do
- it 're raises Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
- expect { aggregated_metrics_data }.to raise_error error
- end
- end
-
- context 'production' do
- it 'rescues Gitlab::UsageDataCounters::HLLRedisCounter::EventError' do
- stub_rails_env('production')
-
- expect(aggregated_metrics_data).to eq('gmau_1' => -1)
- end
- end
- end
+ context 'with invalid configuration' do
+ where(:time_frame, :operator, :datasource, :expected_error) do
+ '28d' | 'SUM' | 'redis_hll' | namespace::UnknownAggregationOperator
+ '7d' | 'AND' | 'mongodb' | namespace::UnknownAggregationSource
+ 'all' | 'AND' | 'redis_hll' | namespace::DisallowedAggregationTimeFrame
end
- end
-
- describe '.aggregated_metrics_all_time_data' do
- subject(:aggregated_metrics_data) { described_class.new(recorded_at).all_time_data }
- let(:start_date) { nil }
- let(:end_date) { nil }
- let(:time_frame) { ['all'] }
-
- it_behaves_like 'database_sourced_aggregated_metrics'
-
- context 'redis sourced aggregated metrics' do
- let(:aggregated_metrics) { [aggregated_metric(name: 'gmau_1', time_frame: time_frame)] }
+ with_them do
+ let(:aggregate) do
+ {
+ source: datasource,
+ operator: operator,
+ events: %w[event1 event2]
+ }
+ end
- before do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
- end
+ subject(:calculate_count_for_aggregation) do
+ described_class
+ .new(recorded_at)
+ .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
- context 'development and test environment' do
- it 'raises Gitlab::Usage::Metrics::Aggregates::DisallowedAggregationTimeFrame' do
- expect { aggregated_metrics_data }.to raise_error namespace::DisallowedAggregationTimeFrame
+ context 'with non prod environment' do
+ it 'raises error' do
+ expect { calculate_count_for_aggregation }.to raise_error expected_error
end
end
- context 'production env' do
- it 'returns fallback value for unsupported time frame' do
+ context 'with prod environment' do
+ before do
stub_rails_env('production')
+ end
- expect(aggregated_metrics_data).to eq('gmau_1' => -1)
+ it 'returns fallback value' do
+ expect(calculate_count_for_aggregation).to be(-1)
end
end
end
end
- context 'legacy aggregated metrics configuration' do
- let(:temp_dir) { Dir.mktmpdir }
- let(:temp_file) { Tempfile.new(%w[common .yml], temp_dir) }
-
- before do
- stub_const("#{namespace}::AGGREGATED_METRICS_PATH", File.expand_path('*.yml', temp_dir))
- File.open(temp_file.path, "w+b") do |file|
- file.write [aggregated_metric(name: "gmau_1", time_frame: '7d')].to_yaml
- end
- end
-
- after do
- temp_file.unlink
- FileUtils.remove_entry(temp_dir) if Dir.exist?(temp_dir)
+ context 'when union data is not available' do
+ subject(:calculate_count_for_aggregation) do
+ described_class
+ .new(recorded_at)
+ .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
end
- it 'allows for YAML aliases in aggregated metrics configs' do
- expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true).at_least(:once)
-
- described_class.new(recorded_at)
+ where(:time_frame, :operator, :datasource) do
+ '28d' | 'OR' | 'redis_hll'
+ '7d' | 'OR' | 'database'
end
- end
-
- describe '.aggregated_metrics_weekly_data' do
- subject(:aggregated_metrics_data) { described_class.new(recorded_at).weekly_data }
- let(:start_date) { 7.days.ago.to_date }
- let(:time_frame) { ['7d'] }
-
- it_behaves_like 'database_sourced_aggregated_metrics'
- it_behaves_like 'redis_sourced_aggregated_metrics'
- end
-
- describe '.aggregated_metrics_monthly_data' do
- subject(:aggregated_metrics_data) { described_class.new(recorded_at).monthly_data }
-
- let(:start_date) { 4.weeks.ago.to_date }
- let(:time_frame) { ['28d'] }
-
- it_behaves_like 'database_sourced_aggregated_metrics'
- it_behaves_like 'redis_sourced_aggregated_metrics'
- end
-
- describe '.calculate_count_for_aggregation' do
- using RSpec::Parameterized::TableSyntax
-
- context 'with valid configuration' do
- where(:number_of_days, :operator, :datasource, :expected_method) do
- 28 | 'AND' | 'redis' | :calculate_metrics_intersections
- 7 | 'AND' | 'redis' | :calculate_metrics_intersections
- 28 | 'AND' | 'database' | :calculate_metrics_intersections
- 7 | 'AND' | 'database' | :calculate_metrics_intersections
- 28 | 'OR' | 'redis' | :calculate_metrics_union
- 7 | 'OR' | 'redis' | :calculate_metrics_union
- 28 | 'OR' | 'database' | :calculate_metrics_union
- 7 | 'OR' | 'database' | :calculate_metrics_union
- end
-
- with_them do
- let(:time_frame) { "#{number_of_days}d" }
- let(:start_date) { number_of_days.days.ago.to_date }
- let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
- let(:aggregate) do
- {
- source: datasource,
- operator: operator,
- events: %w[event1 event2]
- }
- end
-
- subject(:calculate_count_for_aggregation) do
- described_class
- .new(recorded_at)
- .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
- end
-
- it 'returns the number of unique events for aggregation', :aggregate_failures do
- expect(namespace::SOURCES[datasource])
- .to receive(expected_method)
- .with(params.merge(metric_names: %w[event1 event2]))
- .and_return(5)
- expect(calculate_count_for_aggregation).to eq(5)
- end
+ with_them do
+ before do
+ allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
end
- end
- context 'with invalid configuration' do
- where(:time_frame, :operator, :datasource, :expected_error) do
- '28d' | 'SUM' | 'redis' | namespace::UnknownAggregationOperator
- '7d' | 'AND' | 'mongodb' | namespace::UnknownAggregationSource
- 'all' | 'AND' | 'redis' | namespace::DisallowedAggregationTimeFrame
+ let(:aggregate) do
+ {
+ source: datasource,
+ operator: operator,
+ events: %w[event1 event2]
+ }
end
- with_them do
- let(:aggregate) do
- {
- source: datasource,
- operator: operator,
- events: %w[event1 event2]
- }
- end
-
- subject(:calculate_count_for_aggregation) do
- described_class
- .new(recorded_at)
- .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
- end
-
- context 'with non prod environment' do
- it 'raises error' do
- expect { calculate_count_for_aggregation }.to raise_error expected_error
- end
- end
-
- context 'with prod environment' do
- before do
- stub_rails_env('production')
- end
-
- it 'returns fallback value' do
- expect(calculate_count_for_aggregation).to be(-1)
- end
+ context 'with non prod environment' do
+ it 'raises error' do
+ expect { calculate_count_for_aggregation }.to raise_error sources::UnionNotAvailable
end
end
- end
-
- context 'when union data is not available' do
- subject(:calculate_count_for_aggregation) do
- described_class
- .new(recorded_at)
- .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
- end
- where(:time_frame, :operator, :datasource) do
- '28d' | 'OR' | 'redis'
- '7d' | 'OR' | 'database'
- end
-
- with_them do
+ context 'with prod environment' do
before do
- allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
- end
-
- let(:aggregate) do
- {
- source: datasource,
- operator: operator,
- events: %w[event1 event2]
- }
- end
-
- context 'with non prod environment' do
- it 'raises error' do
- expect { calculate_count_for_aggregation }.to raise_error sources::UnionNotAvailable
- end
+ stub_rails_env('production')
end
- context 'with prod environment' do
- before do
- stub_rails_env('production')
- end
-
- it 'returns fallback value' do
- expect(calculate_count_for_aggregation).to be(-1)
- end
+ it 'returns fallback value' do
+ expect(calculate_count_for_aggregation).to be(-1)
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_merge_request_authors_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_merge_request_authors_metric_spec.rb
new file mode 100644
index 00000000000..92459e92eac
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_merge_request_authors_metric_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountMergeRequestAuthorsMetric do
+ let(:expected_value) { 1 }
+ let(:start) { 30.days.ago.to_s(:db) }
+ let(:finish) { 2.days.ago.to_s(:db) }
+
+ let(:expected_query) do
+ "SELECT COUNT(DISTINCT \"merge_requests\".\"author_id\") FROM \"merge_requests\"" \
+ " WHERE \"merge_requests\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
+ end
+
+ before do
+ user = create(:user)
+ user2 = create(:user)
+
+ create(:merge_request, created_at: 1.year.ago, author: user)
+ create(:merge_request, created_at: 1.week.ago, author: user2)
+ create(:merge_request, created_at: 1.week.ago, author: user2)
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
index f73155642d6..f1ecc8c8ab5 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -3,12 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
+ let(:database_metric_class) { Class.new(described_class) }
+
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :count
- metric_class.start { metric_class.relation.minimum(:id) }
- metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.start { Issue.minimum(:id) }
+ metric_class.finish { Issue.maximum(:id) }
end.new(time_frame: 'all')
end
@@ -38,11 +40,11 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with metric options specified with custom batch_size' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :count
- metric_class.start { metric_class.relation.minimum(:id) }
- metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.start { Issue.minimum(:id) }
+ metric_class.finish { Issue.maximum(:id) }
metric_class.metric_options { { batch_size: 12345 } }
end.new(time_frame: 'all')
end
@@ -60,7 +62,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with start and finish not called' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :count
end.new(time_frame: 'all')
@@ -73,7 +75,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with availability defined' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :count
metric_class.available? { false }
@@ -87,7 +89,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with availability not defined' do
subject do
- Class.new(described_class) do
+ database_metric_class do
relation { Issue }
operation :count
end.new(time_frame: 'all')
@@ -100,11 +102,11 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with cache_start_and_finish_as called' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :count
- metric_class.start { metric_class.relation.minimum(:id) }
- metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.start { Issue.minimum(:id) }
+ metric_class.finish { Issue.maximum(:id) }
metric_class.cache_start_and_finish_as :special_issue_count
end.new(time_frame: 'all')
end
@@ -123,11 +125,11 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with estimate_batch_distinct_count' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation(:estimate_batch_distinct_count)
- metric_class.start { metric_class.relation.minimum(:id) }
- metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.start { Issue.minimum(:id) }
+ metric_class.finish { Issue.maximum(:id) }
end.new(time_frame: 'all')
end
@@ -139,13 +141,13 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
let(:buckets) { double('Buckets').as_null_object }
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation(:estimate_batch_distinct_count) do |result|
result.foo
end
- metric_class.start { metric_class.relation.minimum(:id) }
- metric_class.finish { metric_class.relation.maximum(:id) }
+ metric_class.start { Issue.minimum(:id) }
+ metric_class.finish { Issue.maximum(:id) }
end.new(time_frame: 'all')
end
@@ -163,7 +165,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with custom timestamp column' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :count
metric_class.timestamp_column :last_edited_at
@@ -171,6 +173,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
end
it 'calculates a correct result' do
+ create(:issue, last_edited_at: 40.days.ago)
create(:issue, last_edited_at: 5.days.ago)
expect(subject.value).to eq(1)
@@ -179,24 +182,40 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
context 'with default timestamp column' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :count
end.new(time_frame: '28d')
end
it 'calculates a correct result' do
- create(:issue, last_edited_at: 5.days.ago)
+ create(:issue, created_at: 40.days.ago)
create(:issue, created_at: 5.days.ago)
expect(subject.value).to eq(1)
end
end
+
+ context 'with additional parameters passed via options' do
+ subject do
+ database_metric_class.tap do |metric_class|
+ metric_class.relation ->(options) { Issue.where(confidential: options[:confidential]) }
+ metric_class.operation :count
+ end.new(time_frame: '28d', options: { confidential: true })
+ end
+
+ it 'calculates a correct result' do
+ create(:issue, created_at: 5.days.ago, confidential: true)
+ create(:issue, created_at: 5.days.ago, confidential: false)
+
+ expect(subject.value).to eq(1)
+ end
+ end
end
context 'with unimplemented operation method used' do
subject do
- described_class.tap do |metric_class|
+ database_metric_class.tap do |metric_class|
metric_class.relation { Issue }
metric_class.operation :invalid_operation
end.new(time_frame: 'all')
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb
deleted file mode 100644
index 757adee6117..00000000000
--- a/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DistinctCountProjectsWithExpirationPolicyDisabledMetric do
- before_all do
- create(:container_expiration_policy, enabled: false)
- create(:container_expiration_policy, enabled: false, created_at: 29.days.ago)
- create(:container_expiration_policy, enabled: true)
- end
-
- it_behaves_like 'a correct instrumented metric value', { time_frame: '28d' } do
- let(:expected_value) { 1 }
- end
-
- it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' } do
- let(:expected_value) { 2 }
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_metric_spec.rb
new file mode 100644
index 00000000000..a1ca658a0d7
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_metric_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DistinctCountProjectsWithExpirationPolicyMetric do
+ before_all do
+ create(:container_expiration_policy, enabled: false)
+ create(:container_expiration_policy, enabled: false, created_at: 29.days.ago)
+ create(:container_expiration_policy, keep_n: nil)
+ create(:container_expiration_policy, keep_n: 5, enabled: true)
+ create(:container_expiration_policy, keep_n: 5, enabled: true)
+ create(:container_expiration_policy, keep_n: 5, enabled: true)
+ create(:container_expiration_policy, older_than: '7d')
+ create(:container_expiration_policy, cadence: '14d')
+ create(:container_expiration_policy, enabled: true)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', options: { enabled: false } } do
+ let(:expected_value) { 1 }
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', options: { enabled: false } } do
+ let(:expected_value) { 2 }
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', options: { keep_n: 5, enabled: true } } do
+ let(:expected_value) { 3 }
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', options: { cadence: '14d' } } do
+ let(:expected_value) { 1 }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_period_setting_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_period_setting_metric_spec.rb
new file mode 100644
index 00000000000..a63616aeb48
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_period_setting_metric_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DormantUserPeriodSettingMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:deactivate_dormant_users_period_value, :expected_value) do
+ 90 | 90 # default
+ 365 | 365
+ end
+
+ with_them do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ stub_application_setting(deactivate_dormant_users_period: deactivate_dormant_users_period_value)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', {}
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_setting_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_setting_enabled_metric_spec.rb
new file mode 100644
index 00000000000..5c8ca502f82
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/dormant_user_setting_enabled_metric_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DormantUserSettingEnabledMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:deactivate_dormant_users_enabled, :expected_value) do
+ 1 | 1
+ 0 | 0
+ end
+
+ with_them do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ stub_application_setting(deactivate_dormant_users: deactivate_dormant_users_enabled)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', {}
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
new file mode 100644
index 00000000000..cb94da11d58
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_cta_clicked_metric_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailCtaClickedMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:email_attributes) { { cta_clicked_at: Date.yesterday, track: 'verify', series: 0 } }
+ let(:options) { { track: 'verify', series: 0 } }
+ let(:expected_value) { 2 }
+ let(:expected_query) do
+ 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails"' \
+ ' WHERE "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL' \
+ ' AND "in_product_marketing_emails"."series" = 0'\
+ ' AND "in_product_marketing_emails"."track" = 1'
+ end
+
+ before do
+ create_list :in_product_marketing_email, 2, email_attributes
+
+ create :in_product_marketing_email, email_attributes.merge(cta_clicked_at: nil)
+ create :in_product_marketing_email, email_attributes.merge(track: 'team')
+ create :in_product_marketing_email, email_attributes.merge(series: 1)
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', {
+ options: { track: 'verify', series: 0 },
+ time_frame: 'all'
+ }
+
+ where(:options_key, :valid_value, :invalid_value) do
+ :track | 'admin_verify' | 'invite_team'
+ :series | 1 | 5
+ end
+
+ with_them do
+ it "raises an exception if option is not present" do
+ expect do
+ described_class.new(options: options.except(options_key), time_frame: 'all')
+ end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
+ end
+
+ it "raises an exception if option has invalid value" do
+ expect do
+ options[options_key] = invalid_value
+ described_class.new(options: options, time_frame: 'all')
+ end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
+ end
+
+ it "doesn't raise exceptions if option has valid value" do
+ options[options_key] = valid_value
+ described_class.new(options: options, time_frame: 'all')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
new file mode 100644
index 00000000000..0cc82773d56
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/in_product_marketing_email_sent_metric_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailSentMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:email_attributes) { { track: 'verify', series: 0 } }
+ let(:expected_value) { 2 }
+ let(:expected_query) do
+ 'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails"' \
+ ' WHERE "in_product_marketing_emails"."series" = 0'\
+ ' AND "in_product_marketing_emails"."track" = 1'
+ end
+
+ before do
+ create_list :in_product_marketing_email, 2, email_attributes
+
+ create :in_product_marketing_email, email_attributes.merge(track: 'team')
+ create :in_product_marketing_email, email_attributes.merge(series: 1)
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', {
+ options: { track: 'verify', series: 0 },
+ time_frame: 'all'
+ }
+
+ where(:options_key, :valid_value, :invalid_value) do
+ :track | 'admin_verify' | 'invite_team'
+ :series | 1 | 5
+ end
+
+ with_them do
+ it "raises an exception if option is not present" do
+ expect do
+ described_class.new(options: email_attributes.except(options_key), time_frame: 'all')
+ end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
+ end
+
+ it "raises an exception if option has invalid value" do
+ expect do
+ email_attributes[options_key] = invalid_value
+ described_class.new(options: email_attributes, time_frame: 'all')
+ end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
+ end
+
+ it "doesn't raise exceptions if option has valid value" do
+ email_attributes[options_key] = valid_value
+ described_class.new(options: email_attributes, time_frame: 'all')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb
index 3e315692d0a..35e5d7f2796 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb
@@ -15,6 +15,8 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::WorkItemsActivityAggreg
users_creating_work_items
users_updating_work_item_title
users_updating_work_item_dates
+ users_updating_work_item_labels
+ users_updating_work_item_milestone
users_updating_work_item_iteration
]
}
@@ -44,16 +46,26 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::WorkItemsActivityAggreg
describe '#value', :clean_gitlab_redis_shared_state do
let(:counter) { Gitlab::UsageDataCounters::HLLRedisCounter }
+ let(:author1_id) { 1 }
+ let(:author2_id) { 2 }
+ let(:event_time) { 1.week.ago }
before do
- counter.track_event(:users_creating_work_items, values: 1, time: 1.week.ago)
- counter.track_event(:users_updating_work_item_title, values: 1, time: 1.week.ago)
- counter.track_event(:users_updating_work_item_dates, values: 2, time: 1.week.ago)
- counter.track_event(:users_updating_work_item_iteration, values: 2, time: 1.week.ago)
+ counter.track_event(:users_creating_work_items, values: author1_id, time: event_time)
end
- it 'has correct value' do
- expect(described_class.new(metric_definition).value).to eq 2
+ it 'has correct value after events are tracked', :aggregate_failures do
+ expect do
+ counter.track_event(:users_updating_work_item_title, values: author1_id, time: event_time)
+ counter.track_event(:users_updating_work_item_dates, values: author1_id, time: event_time)
+ counter.track_event(:users_updating_work_item_labels, values: author1_id, time: event_time)
+ counter.track_event(:users_updating_work_item_milestone, values: author1_id, time: event_time)
+ end.to not_change { described_class.new(metric_definition).value }
+
+ expect do
+ counter.track_event(:users_updating_work_item_iteration, values: author2_id, time: event_time)
+ counter.track_event(:users_updating_weight_estimate, values: author1_id, time: event_time)
+ end.to change { described_class.new(metric_definition).value }.from(1).to(2)
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
index f9cd6e88e0a..24107727a8e 100644
--- a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
@@ -63,7 +63,6 @@ RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
context 'for sum metrics' do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with sum(JiraImportState.finished, :imported_issues_count)
- let(:key_path) { 'counts.jira_imports_total_imported_issues_count' }
let(:operation) { :sum }
let(:relation) { JiraImportState.finished }
let(:column) { :imported_issues_count }
@@ -74,7 +73,6 @@ RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
context 'for average metrics' do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with average(Ci::Pipeline, :duration)
- let(:key_path) { 'counts.ci_pipeline_duration' }
let(:operation) { :average }
let(:relation) { Ci::Pipeline }
let(:column) { :duration }
@@ -100,5 +98,16 @@ RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
let(:name_suggestion) { /<please fill metric name>/ }
end
end
+
+ context 'for metrics with `having` keyword' do
+ it_behaves_like 'name suggestion' do
+ let(:operation) { :count }
+ let(:relation) { Issue.with_alert_management_alerts.having('COUNT(alert_management_alerts) > 1').group(:id) }
+
+ let(:column) { nil }
+ let(:constraints) { /<adjective describing: '\(\(COUNT\(alert_management_alerts\) > 1\)\)'>/ }
+ let(:name_suggestion) { /count_#{constraints}_issues_<with>_alert_management_alerts/ }
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb
new file mode 100644
index 00000000000..492acf2a902
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/having_constraints_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::HavingConstraints do
+ describe '#accept' do
+ let(:connection) { ApplicationRecord.connection }
+ let(:collector) { Arel::Collectors::SubstituteBinds.new(connection, Arel::Collectors::SQLString.new) }
+
+ it 'builds correct constraints description' do
+ table = Arel::Table.new('records')
+ havings = table[:attribute].sum.eq(6).and(table[:attribute].count.gt(5))
+ arel = table.from.project(table['id'].count).having(havings).group(table[:attribute2])
+ described_class.new(connection).accept(arel, collector)
+
+ expect(collector.value).to eql '(SUM(records.attribute) = 6 AND COUNT(records.attribute) > 5)'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/where_constraints_spec.rb
index 68016e760e4..42a776478a4 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/where_constraints_spec.rb
@@ -2,14 +2,15 @@
require 'spec_helper'
-RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::Constraints do
+RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::WhereConstraints do
describe '#accept' do
- let(:collector) { Arel::Collectors::SubstituteBinds.new(ActiveRecord::Base.connection, Arel::Collectors::SQLString.new) }
+ let(:connection) { ApplicationRecord.connection }
+ let(:collector) { Arel::Collectors::SubstituteBinds.new(connection, Arel::Collectors::SQLString.new) }
it 'builds correct constraints description' do
table = Arel::Table.new('records')
arel = table.from.project(table['id'].count).where(table[:attribute].eq(true).and(table[:some_value].gt(5)))
- described_class.new(ApplicationRecord.connection).accept(arel, collector)
+ described_class.new(connection).accept(arel, collector)
expect(collector.value).to eql '(records.attribute = true AND records.some_value > 5)'
end
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index 1ca0bb0e9ea..f1115a8813d 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -12,6 +12,10 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
shared_examples 'tracks template' do
let(:subject) { described_class.track_unique_project_event(project: project, template: template_path, config_source: config_source, user: user) }
+ let(:template_name) do
+ expanded_template_name = described_class.expand_template_name(template_path)
+ described_class.ci_template_event_name(expanded_template_name, config_source)
+ end
it "has an event defined for template" do
expect do
@@ -20,33 +24,18 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
end
it "tracks template" do
- expanded_template_name = described_class.expand_template_name(template_path)
- expected_template_event_name = described_class.ci_template_event_name(expanded_template_name, config_source)
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(expected_template_event_name, values: project.id)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(template_name, values: project.id)
subject
end
- context 'Snowplow' do
- it 'event is not tracked if FF is disabled' do
- stub_feature_flags(route_hll_to_snowplow: false)
-
- subject
-
- expect_no_snowplow_event
- end
-
- it 'tracks event' do
- subject
-
- expect_snowplow_event(
- category: described_class.to_s,
- action: 'ci_templates_unique',
- namespace: project.namespace,
- user: user,
- project: project
- )
- end
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow }
+ let(:category) { described_class.to_s }
+ let(:action) { 'ci_templates_unique' }
+ let(:namespace) { project.namespace }
+ let(:label) { 'redis_hll_counters.ci_templates.ci_templates_total_unique_counts_monthly' }
+ let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: template_name).to_context] }
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index d0b935d59dd..08c712889a8 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
# Without freezing the time, the test may behave inconsistently
# depending on which day of the week test is run.
# Monday 1st of June
+ described_class.clear_memoization(:known_events)
reference_time = Time.utc(2020, 6, 1)
travel_to(reference_time) { example.run }
described_class.clear_memoization(:known_events)
diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
index e7edb8b9cf1..ced9ec7f221 100644
--- a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
@@ -26,6 +26,12 @@ RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 3, kubernetes_agent_k8s_api_proxy_request: 6)
end
+ context 'with empty events' do
+ let(:events) { nil }
+
+ it { expect { subject }.not_to change(described_class, :totals) }
+ end
+
context 'event is unknown' do
let(:events) do
{
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
index 2d251017c87..aaf509b6f81 100644
--- a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -36,4 +36,12 @@ RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_
it_behaves_like 'work item unique counter'
end
+
+ describe '.track_work_item_milestone_changed_action' do
+ subject(:track_event) { described_class.track_work_item_milestone_changed_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_MILESTONE_CHANGED }
+
+ it_behaves_like 'work item unique counter'
+ end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index cb645ae3e53..d8f50fa27bb 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -33,8 +33,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
.not_to include(:merge_requests_users)
expect(subject[:usage_activity_by_stage_monthly][:create])
.to include(:merge_requests_users)
- expect(subject[:counts_weekly]).to include(:aggregated_metrics)
- expect(subject[:counts_monthly]).to include(:aggregated_metrics)
end
it 'clears memoized values' do
@@ -608,13 +606,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let_it_be(:disabled) { create(:container_expiration_policy, enabled: false) }
let_it_be(:enabled) { create(:container_expiration_policy, enabled: true) }
- %i[keep_n cadence older_than].each do |attribute|
- ContainerExpirationPolicy.send("#{attribute}_options").keys.each do |value|
- let_it_be("container_expiration_policy_with_#{attribute}_set_to_#{value}") { create(:container_expiration_policy, attribute => value) }
- end
+ ::ContainerExpirationPolicy.older_than_options.keys.each do |value|
+ let_it_be("container_expiration_policy_with_older_than_set_to_#{value}") { create(:container_expiration_policy, older_than: value) }
end
- let_it_be('container_expiration_policy_with_keep_n_set_to_null') { create(:container_expiration_policy, keep_n: nil) }
let_it_be('container_expiration_policy_with_older_than_set_to_null') { create(:container_expiration_policy, older_than: nil) }
let(:inactive_policies) { ::ContainerExpirationPolicy.where(enabled: false) }
@@ -623,27 +618,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.data[:counts] }
it 'gathers usage data' do
- expect(subject[:projects_with_expiration_policy_enabled]).to eq 19
-
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_5]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 13
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_25]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_50]).to eq 1
-
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_unset]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_7d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_14d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_30d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_60d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 14
-
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 15
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_7d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_14d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1month]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_3month]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 2
end
end
@@ -757,33 +737,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- describe '.usage_data_counters' do
- subject { described_class.usage_data_counters }
-
- it { is_expected.to all(respond_to :totals) }
- it { is_expected.to all(respond_to :fallback_totals) }
-
- describe 'the results of calling #totals on all objects in the array' do
- subject { described_class.usage_data_counters.map(&:totals) }
-
- it { is_expected.to all(be_a Hash) }
- it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer))) }
- end
-
- describe 'the results of calling #fallback_totals on all objects in the array' do
- subject { described_class.usage_data_counters.map(&:fallback_totals) }
-
- it { is_expected.to all(be_a Hash) }
- it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(eq(-1)))) }
- end
-
- it 'does not have any conflicts' do
- all_keys = subject.flat_map { |counter| counter.totals.keys }
-
- expect(all_keys.size).to eq all_keys.to_set.size
- end
- end
-
describe '.license_usage_data' do
subject { described_class.license_usage_data }
@@ -1107,10 +1060,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
context 'snowplow stats' do
- before do
- stub_feature_flags(usage_data_instrumentation: false)
- end
-
it 'gathers snowplow stats' do
expect(subject[:settings][:snowplow_enabled]).to eq(Gitlab::CurrentSettings.snowplow_enabled?)
expect(subject[:settings][:snowplow_configured_to_gitlab_collector]).to eq(snowplow_gitlab_host?)
@@ -1159,20 +1108,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:project) { build(:project) }
before do
- counter = Gitlab::UsageDataCounters::TrackUniqueEvents
- project_type = Event::TARGET_TYPES[:project]
- wiki = Event::TARGET_TYPES[:wiki]
- design = Event::TARGET_TYPES[:design]
-
- counter.track_event(event_action: :pushed, event_target: project_type, author_id: 1)
- counter.track_event(event_action: :pushed, event_target: project_type, author_id: 1)
- counter.track_event(event_action: :pushed, event_target: project_type, author_id: 2)
- counter.track_event(event_action: :pushed, event_target: project_type, author_id: 3)
- counter.track_event(event_action: :pushed, event_target: project_type, author_id: 4, time: time - 3.days)
- counter.track_event(event_action: :created, event_target: wiki, author_id: 3)
- counter.track_event(event_action: :created, event_target: design, author_id: 3)
- counter.track_event(event_action: :created, event_target: design, author_id: 4)
-
counter = Gitlab::UsageDataCounters::EditorUniqueCounter
counter.track_web_ide_edit_action(author: user1, project: project)
@@ -1191,10 +1126,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'returns the distinct count of user actions within the specified time period' do
expect(described_class.action_monthly_active_users(time_period)).to eq(
{
- action_monthly_active_users_design_management: 2,
- action_monthly_active_users_project_repo: 3,
- action_monthly_active_users_wiki_repo: 1,
- action_monthly_active_users_git_write: 4,
action_monthly_active_users_web_ide_edit: 2,
action_monthly_active_users_sfe_edit: 2,
action_monthly_active_users_snippet_editor_edit: 2,
@@ -1234,23 +1165,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- describe '.aggregated_metrics_data' do
- it 'uses ::Gitlab::Usage::Metrics::Aggregates::Aggregate methods', :aggregate_failures do
- expected_payload = {
- counts_weekly: { aggregated_metrics: { global_search_gmau: 123 } },
- counts_monthly: { aggregated_metrics: { global_search_gmau: 456 } },
- counts: { aggregate_global_search_gmau: 789 }
- }
-
- expect_next_instance_of(::Gitlab::Usage::Metrics::Aggregates::Aggregate) do |instance|
- expect(instance).to receive(:weekly_data).and_return(global_search_gmau: 123)
- expect(instance).to receive(:monthly_data).and_return(global_search_gmau: 456)
- expect(instance).to receive(:all_time_data).and_return(global_search_gmau: 789)
- end
- expect(described_class.aggregated_metrics_data).to eq(expected_payload)
- end
- end
-
describe '.service_desk_counts' do
subject { described_class.send(:service_desk_counts) }
@@ -1264,110 +1178,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- describe '.email_campaign_counts' do
- subject { described_class.send(:email_campaign_counts) }
-
- context 'when queries time out' do
- before do
- allow_any_instance_of(ActiveRecord::Relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid)
- allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
- end
-
- context 'with should_raise_for_dev? true' do
- let(:should_raise_for_dev) { true }
-
- it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
- end
- end
-
- context 'with should_raise_for_dev? false' do
- let(:should_raise_for_dev) { false }
-
- it 'returns -1 for email campaign data' do
- expected_data = {
- "in_product_marketing_email_create_0_sent" => -1,
- "in_product_marketing_email_create_0_cta_clicked" => -1,
- "in_product_marketing_email_create_1_sent" => -1,
- "in_product_marketing_email_create_1_cta_clicked" => -1,
- "in_product_marketing_email_create_2_sent" => -1,
- "in_product_marketing_email_create_2_cta_clicked" => -1,
- "in_product_marketing_email_team_short_0_sent" => -1,
- "in_product_marketing_email_team_short_0_cta_clicked" => -1,
- "in_product_marketing_email_trial_short_0_sent" => -1,
- "in_product_marketing_email_trial_short_0_cta_clicked" => -1,
- "in_product_marketing_email_admin_verify_0_sent" => -1,
- "in_product_marketing_email_admin_verify_0_cta_clicked" => -1,
- "in_product_marketing_email_verify_0_sent" => -1,
- "in_product_marketing_email_verify_0_cta_clicked" => -1,
- "in_product_marketing_email_verify_1_sent" => -1,
- "in_product_marketing_email_verify_1_cta_clicked" => -1,
- "in_product_marketing_email_verify_2_sent" => -1,
- "in_product_marketing_email_verify_2_cta_clicked" => -1,
- "in_product_marketing_email_trial_0_sent" => -1,
- "in_product_marketing_email_trial_0_cta_clicked" => -1,
- "in_product_marketing_email_trial_1_sent" => -1,
- "in_product_marketing_email_trial_1_cta_clicked" => -1,
- "in_product_marketing_email_trial_2_sent" => -1,
- "in_product_marketing_email_trial_2_cta_clicked" => -1,
- "in_product_marketing_email_team_0_sent" => -1,
- "in_product_marketing_email_team_0_cta_clicked" => -1,
- "in_product_marketing_email_team_1_sent" => -1,
- "in_product_marketing_email_team_1_cta_clicked" => -1,
- "in_product_marketing_email_team_2_sent" => -1,
- "in_product_marketing_email_team_2_cta_clicked" => -1
- }
-
- expect(subject).to eq(expected_data)
- end
- end
- end
-
- context 'when there are entries' do
- before do
- create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.zone.now)
- create(:in_product_marketing_email, track: :verify, series: 0)
- end
-
- it 'gathers email campaign data' do
- expected_data = {
- "in_product_marketing_email_create_0_sent" => 1,
- "in_product_marketing_email_create_0_cta_clicked" => 1,
- "in_product_marketing_email_create_1_sent" => 0,
- "in_product_marketing_email_create_1_cta_clicked" => 0,
- "in_product_marketing_email_create_2_sent" => 0,
- "in_product_marketing_email_create_2_cta_clicked" => 0,
- "in_product_marketing_email_team_short_0_sent" => 0,
- "in_product_marketing_email_team_short_0_cta_clicked" => 0,
- "in_product_marketing_email_trial_short_0_sent" => 0,
- "in_product_marketing_email_trial_short_0_cta_clicked" => 0,
- "in_product_marketing_email_admin_verify_0_sent" => 0,
- "in_product_marketing_email_admin_verify_0_cta_clicked" => 0,
- "in_product_marketing_email_verify_0_sent" => 1,
- "in_product_marketing_email_verify_0_cta_clicked" => 0,
- "in_product_marketing_email_verify_1_sent" => 0,
- "in_product_marketing_email_verify_1_cta_clicked" => 0,
- "in_product_marketing_email_verify_2_sent" => 0,
- "in_product_marketing_email_verify_2_cta_clicked" => 0,
- "in_product_marketing_email_trial_0_sent" => 0,
- "in_product_marketing_email_trial_0_cta_clicked" => 0,
- "in_product_marketing_email_trial_1_sent" => 0,
- "in_product_marketing_email_trial_1_cta_clicked" => 0,
- "in_product_marketing_email_trial_2_sent" => 0,
- "in_product_marketing_email_trial_2_cta_clicked" => 0,
- "in_product_marketing_email_team_0_sent" => 0,
- "in_product_marketing_email_team_0_cta_clicked" => 0,
- "in_product_marketing_email_team_1_sent" => 0,
- "in_product_marketing_email_team_1_cta_clicked" => 0,
- "in_product_marketing_email_team_2_sent" => 0,
- "in_product_marketing_email_team_2_cta_clicked" => 0
- }
-
- expect(subject).to eq(expected_data)
- end
- end
- end
-
describe ".with_duration" do
it 'records duration' do
expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator)
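The removed email-campaign and counter specs above exercised the usage-data fallback pattern: when an underlying count query raises, the metric is reported as -1 instead of failing the whole payload. A rough illustrative sketch of that pattern (the safe_count helper below is an assumption for illustration, not GitLab's actual API):

  # Report a fallback value instead of letting a timed-out query
  # break the whole usage-data payload.
  def safe_count(relation, fallback: -1)
    relation.count
  rescue ActiveRecord::StatementInvalid
    fallback
  end

  # safe_count(some_relation)  # => real count normally, -1 when the query fails
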
diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb
index cb03797b3d9..236b6d29ba7 100644
--- a/spec/lib/gitlab/utils/strong_memoize_spec.rb
+++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb
@@ -35,16 +35,23 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
strong_memoize_attr :method_name_attr
- strong_memoize_attr :different_method_name_attr, :different_member_name_attr
def different_method_name_attr
trace << value
value
end
+ strong_memoize_attr :different_method_name_attr, :different_member_name_attr
- strong_memoize_attr :enabled?
def enabled?
true
end
+ strong_memoize_attr :enabled?
+
+ def method_name_with_args(*args)
+ strong_memoize_with(:method_name_with_args, args) do
+ trace << [value, args]
+ value
+ end
+ end
def trace
@trace ||= []
@@ -141,6 +148,36 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
end
+ describe '#strong_memoize_with' do
+ [nil, false, true, 'value', 0, [0]].each do |value|
+ context "with value #{value}" do
+ let(:value) { value }
+
+ it 'only calls the block once' do
+ value0 = object.method_name_with_args(1)
+ value1 = object.method_name_with_args(1)
+ value2 = object.method_name_with_args([2, 3])
+ value3 = object.method_name_with_args([2, 3])
+
+ expect(value0).to eq(value)
+ expect(value1).to eq(value)
+ expect(value2).to eq(value)
+ expect(value3).to eq(value)
+
+ expect(object.trace).to contain_exactly([value, [1]], [value, [[2, 3]]])
+ end
+
+ it 'returns and defines the instance variable for the exact value' do
+ returned_value = object.method_name_with_args(1, 2, 3)
+ memoized_value = object.instance_variable_get(:@method_name_with_args)
+
+ expect(returned_value).to eql(value)
+ expect(memoized_value).to eql({ [[1, 2, 3]] => value })
+ end
+ end
+ end
+ end
+
describe '#strong_memoized?' do
let(:value) { :anything }
@@ -227,5 +264,21 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
expect(klass.public_instance_methods).to include(:public_method)
end
end
+
+ context "when method doesn't exist" do
+ let(:klass) do
+ strong_memoize_class = described_class
+
+ Struct.new(:value) do
+ include strong_memoize_class
+ end
+ end
+
+ subject { klass.strong_memoize_attr(:nonexistent_method) }
+
+ it 'fails when strong-memoizing a nonexistent method' do
+ expect { subject }.to raise_error(NameError, %r{undefined method `nonexistent_method' for class})
+ end
+ end
end
end
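For context on the spec changes above: a minimal, hypothetical usage sketch (the Resolver class, #compute_flag and #fetch_data are illustrative assumptions) of the two helpers being exercised. strong_memoize_attr is now declared after the method it wraps, and strong_memoize_with caches one result per distinct set of arguments:

  class Resolver
    include Gitlab::Utils::StrongMemoize

    def expensive_flag?
      compute_flag # assumed to be defined elsewhere
    end
    # Declared after the method definition, matching the updated spec layout.
    strong_memoize_attr :expensive_flag?

    def data_for(key)
      # Memoized once per distinct `key`, as the new #strong_memoize_with spec shows.
      strong_memoize_with(:data_for, key) do
        fetch_data(key) # assumed to be defined elsewhere
      end
    end
  end
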
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index d1fdaf7a9db..80b2ec63af9 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -63,9 +63,21 @@ RSpec.describe Gitlab::Utils do
expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
end
- it 'does nothing for a non-string' do
+ it 'does nothing for nil' do
expect(check_path_traversal!(nil)).to be_nil
end
+
+ it 'does nothing for safe HashedPath' do
+ expect(check_path_traversal!(Gitlab::HashedPath.new('tmp', root_hash: 1))).to eq '6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b/tmp'
+ end
+
+ it 'raises for unsafe HashedPath' do
+ expect { check_path_traversal!(Gitlab::HashedPath.new('tmp', '..', 'etc', 'passwd', root_hash: 1)) }.to raise_error(/Invalid path/)
+ end
+
+ it 'raises for other non-strings' do
+ expect { check_path_traversal!(%w[/tmp /tmp/../etc/passwd]) }.to raise_error(/Invalid path/)
+ end
end
describe '.check_allowed_absolute_path_and_path_traversal!' do
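A hypothetical sketch (not the actual Gitlab::Utils implementation) of the behaviour the updated check_path_traversal! spec expects: nil passes through, strings and path-like objects such as Gitlab::HashedPath (assumed to respond to #to_s) are checked for traversal sequences, and any other object raises:

  def check_path_traversal!(path)
    return if path.nil?

    # Only strings and HashedPath objects are treated as paths.
    path_string = path.to_s if path.is_a?(String) || path.is_a?(Gitlab::HashedPath)

    raise StandardError, 'Invalid path' unless path_string
    # Reject any '..' component, whether separated by '/' or '\'.
    raise StandardError, 'Invalid path' if path_string.split(%r{[\\/]}).include?('..')

    path_string
  end
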
diff --git a/spec/lib/gitlab/webpack/file_loader_spec.rb b/spec/lib/gitlab/webpack/file_loader_spec.rb
index 6475ef58611..c2e9cd8124d 100644
--- a/spec/lib/gitlab/webpack/file_loader_spec.rb
+++ b/spec/lib/gitlab/webpack/file_loader_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe Gitlab::Webpack::FileLoader do
stub_request(:get, "http://hostname:2000/public_path/#{error_file_path}").to_raise(StandardError)
end
- it "returns content when respondes succesfully" do
- expect(Gitlab::Webpack::FileLoader.load(file_path)).to be(file_contents)
+ it "returns content when responds successfully" do
+ expect(Gitlab::Webpack::FileLoader.load(file_path)).to eq(file_contents)
end
it "raises error when 404" do