
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-11-18 16:16:36 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-11-18 16:16:36 +0300
commit    311b0269b4eb9839fa63f80c8d7a58f32b8138a0 (patch)
tree      07e7870bca8aed6d61fdcc810731c50d2c40af47 /lib/gitlab
parent    27909cef6c4170ed9205afa7426b8d3de47cbb0c (diff)
Add latest changes from gitlab-org/gitlab@14-5-stable-ee (tag: v14.5.0-rc42)
Diffstat (limited to 'lib/gitlab')
-rw-r--r--  lib/gitlab/access.rb | 30
-rw-r--r--  lib/gitlab/action_cable/config.rb | 4
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder.rb | 114
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/aggregated/data_collector.rb | 54
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/aggregated/label_filter.rb | 31
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/aggregated/median.rb | 36
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher.rb | 116
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb | 41
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/data_collector.rb | 26
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/records_fetcher.rb | 18
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/request_params.rb | 3
-rw-r--r--  lib/gitlab/application_rate_limiter.rb | 34
-rw-r--r--  lib/gitlab/asciidoc/syntax_highlighter/html_pipeline_adapter.rb | 6
-rw-r--r--  lib/gitlab/auth/o_auth/provider.rb | 1
-rw-r--r--  lib/gitlab/background_migration.rb | 102
-rw-r--r--  lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed.rb | 58
-rw-r--r--  lib/gitlab/background_migration/backfill_design_internal_ids.rb | 130
-rw-r--r--  lib/gitlab/background_migration/backfill_project_repositories.rb | 2
-rw-r--r--  lib/gitlab/background_migration/backfill_user_namespace.rb | 38
-rw-r--r--  lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics.rb | 25
-rw-r--r--  lib/gitlab/background_migration/fix_merge_request_diff_commit_users.rb | 156
-rw-r--r--  lib/gitlab/background_migration/fix_orphan_promoted_issues.rb | 13
-rw-r--r--  lib/gitlab/background_migration/fix_ruby_object_in_audit_events.rb | 13
-rw-r--r--  lib/gitlab/background_migration/job_coordinator.rb | 134
-rw-r--r--  lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys.rb | 2
-rw-r--r--  lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data.rb | 146
-rw-r--r--  lib/gitlab/background_migration/migrate_requirements_to_work_items.rb | 13
-rw-r--r--  lib/gitlab/background_migration/migrate_users_bio_to_user_details.rb | 32
-rw-r--r--  lib/gitlab/background_migration/populate_issue_email_participants.rb | 2
-rw-r--r--  lib/gitlab/background_migration/populate_user_highest_roles_table.rb | 58
-rw-r--r--  lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb | 151
-rw-r--r--  lib/gitlab/background_migration/project_namespaces/models/namespace.rb | 17
-rw-r--r--  lib/gitlab/background_migration/project_namespaces/models/project.rb | 16
-rw-r--r--  lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb | 20
-rw-r--r--  lib/gitlab/background_migration/remove_undefined_occurrence_confidence_level.rb | 13
-rw-r--r--  lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb | 13
-rw-r--r--  lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb | 13
-rw-r--r--  lib/gitlab/background_migration/set_default_iteration_cadences.rb | 60
-rw-r--r--  lib/gitlab/background_migration/set_null_external_diff_store_to_local_value.rb | 24
-rw-r--r--  lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value.rb | 26
-rw-r--r--  lib/gitlab/background_migration/update_vulnerabilities_to_dismissed.rb | 13
-rw-r--r--  lib/gitlab/background_migration/update_vulnerability_confidence.rb | 13
-rw-r--r--  lib/gitlab/background_migration/update_vulnerability_occurrences_location.rb | 14
-rw-r--r--  lib/gitlab/bitbucket_server_import/importer.rb | 10
-rw-r--r--  lib/gitlab/blob_helper.rb | 2
-rw-r--r--  lib/gitlab/ci/artifact_file_reader.rb | 27
-rw-r--r--  lib/gitlab/ci/artifacts/metrics.rb | 15
-rw-r--r--  lib/gitlab/ci/build/auto_retry.rb | 4
-rw-r--r--  lib/gitlab/ci/build/context/base.rb | 22
-rw-r--r--  lib/gitlab/ci/build/image.rb | 9
-rw-r--r--  lib/gitlab/ci/build/rules/rule/clause/exists.rb | 12
-rw-r--r--  lib/gitlab/ci/config.rb | 31
-rw-r--r--  lib/gitlab/ci/config/entry/include/rules/rule.rb | 4
-rw-r--r--  lib/gitlab/ci/config/entry/job.rb | 8
-rw-r--r--  lib/gitlab/ci/config/entry/processable.rb | 1
-rw-r--r--  lib/gitlab/ci/config/entry/service.rb | 6
-rw-r--r--  lib/gitlab/ci/config/external/context.rb | 14
-rw-r--r--  lib/gitlab/ci/parsers/security/common.rb | 5
-rw-r--r--  lib/gitlab/ci/parsers/security/validators/schema_validator.rb | 2
-rw-r--r--  lib/gitlab/ci/parsers/security/validators/schemas/sast-report-format.json (renamed from lib/gitlab/ci/parsers/security/validators/schemas/sast.json) | 0
-rw-r--r--  lib/gitlab/ci/parsers/security/validators/schemas/secret-detection-report-format.json (renamed from lib/gitlab/ci/parsers/security/validators/schemas/secret_detection.json) | 0
-rw-r--r--  lib/gitlab/ci/pipeline/chain/command.rb | 3
-rw-r--r--  lib/gitlab/ci/pipeline/chain/config/process.rb | 2
-rw-r--r--  lib/gitlab/ci/pipeline/chain/create_cross_database_associations.rb | 21
-rw-r--r--  lib/gitlab/ci/pipeline/metrics.rb | 9
-rw-r--r--  lib/gitlab/ci/pipeline/seed/build.rb | 14
-rw-r--r--  lib/gitlab/ci/pipeline/seed/stage.rb | 2
-rw-r--r--  lib/gitlab/ci/reports/security/finding.rb | 40
-rw-r--r--  lib/gitlab/ci/reports/security/report.rb | 4
-rw-r--r--  lib/gitlab/ci/reports/security/reports.rb | 19
-rw-r--r--  lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml | 8
-rw-r--r--  lib/gitlab/ci/templates/Django.gitlab-ci.yml | 116
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.latest.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml | 5
-rw-r--r--  lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml | 8
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml | 24
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml | 16
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml | 4
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml | 12
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml | 12
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml | 34
-rw-r--r--  lib/gitlab/ci/templates/Kaniko.gitlab-ci.yml | 47
-rw-r--r--  lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Security/Cluster-Image-Scanning.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml | 4
-rw-r--r--  lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml | 6
-rw-r--r--  lib/gitlab/ci/templates/Security/SAST-IaC.latest.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Terraform.latest.gitlab-ci.yml | 7
-rw-r--r--  lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml | 16
-rw-r--r--  lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/trace.rb | 17
-rw-r--r--  lib/gitlab/ci/trace/archive.rb | 2
-rw-r--r--  lib/gitlab/ci/trace/metrics.rb | 10
-rw-r--r--  lib/gitlab/ci/variables/builder.rb | 49
-rw-r--r--  lib/gitlab/ci/variables/collection.rb | 4
-rw-r--r--  lib/gitlab/ci/yaml_processor/result.rb | 1
-rw-r--r--  lib/gitlab/config_checker/external_database_checker.rb | 4
-rw-r--r--  lib/gitlab/container_repository/tags/cache.rb | 72
-rw-r--r--  lib/gitlab/content_security_policy/config_loader.rb | 49
-rw-r--r--  lib/gitlab/content_security_policy/directives.rb | 21
-rw-r--r--  lib/gitlab/contributions_calendar.rb | 17
-rw-r--r--  lib/gitlab/current_settings.rb | 2
-rw-r--r--  lib/gitlab/cycle_analytics/stage_summary.rb | 3
-rw-r--r--  lib/gitlab/cycle_analytics/summary/base.rb | 4
-rw-r--r--  lib/gitlab/cycle_analytics/summary/deployment_frequency.rb | 9
-rw-r--r--  lib/gitlab/database.rb | 57
-rw-r--r--  lib/gitlab/database/as_with_materialized.rb | 2
-rw-r--r--  lib/gitlab/database/async_indexes/index_creator.rb | 2
-rw-r--r--  lib/gitlab/database/async_indexes/postgres_async_index.rb | 2
-rw-r--r--  lib/gitlab/database/background_migration_job.rb | 1
-rw-r--r--  lib/gitlab/database/batch_counter.rb | 6
-rw-r--r--  lib/gitlab/database/connection.rb | 260
-rw-r--r--  lib/gitlab/database/each_database.rb | 39
-rw-r--r--  lib/gitlab/database/gitlab_schema.rb | 96
-rw-r--r--  lib/gitlab/database/gitlab_schemas.yml | 543
-rw-r--r--  lib/gitlab/database/load_balancing.rb | 2
-rw-r--r--  lib/gitlab/database/load_balancing/configuration.rb | 50
-rw-r--r--  lib/gitlab/database/load_balancing/connection_proxy.rb | 7
-rw-r--r--  lib/gitlab/database/load_balancing/load_balancer.rb | 63
-rw-r--r--  lib/gitlab/database/load_balancing/primary_host.rb | 5
-rw-r--r--  lib/gitlab/database/load_balancing/rack_middleware.rb | 10
-rw-r--r--  lib/gitlab/database/load_balancing/setup.rb | 87
-rw-r--r--  lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb | 10
-rw-r--r--  lib/gitlab/database/load_balancing/sticking.rb | 5
-rw-r--r--  lib/gitlab/database/migration_helpers.rb | 75
-rw-r--r--  lib/gitlab/database/migration_helpers/cascading_namespace_settings.rb | 4
-rw-r--r--  lib/gitlab/database/migrations/observation.rb | 3
-rw-r--r--  lib/gitlab/database/migrations/observers.rb | 3
-rw-r--r--  lib/gitlab/database/migrations/observers/transaction_duration.rb | 42
-rw-r--r--  lib/gitlab/database/partitioning.rb | 83
-rw-r--r--  lib/gitlab/database/partitioning/detached_partition_dropper.rb | 96
-rw-r--r--  lib/gitlab/database/partitioning/monthly_strategy.rb | 4
-rw-r--r--  lib/gitlab/database/partitioning/multi_database_partition_dropper.rb | 35
-rw-r--r--  lib/gitlab/database/partitioning/multi_database_partition_manager.rb | 37
-rw-r--r--  lib/gitlab/database/partitioning/partition_monitoring.rb | 18
-rw-r--r--  lib/gitlab/database/partitioning/replace_table.rb | 7
-rw-r--r--  lib/gitlab/database/partitioning/time_partition.rb | 2
-rw-r--r--  lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb | 4
-rw-r--r--  lib/gitlab/database/postgres_foreign_key.rb | 6
-rw-r--r--  lib/gitlab/database/postgres_hll/batch_distinct_counter.rb | 6
-rw-r--r--  lib/gitlab/database/postgres_index.rb | 3
-rw-r--r--  lib/gitlab/database/postgres_index_bloat_estimate.rb | 2
-rw-r--r--  lib/gitlab/database/query_analyzer.rb | 129
-rw-r--r--  lib/gitlab/database/query_analyzers/base.rb | 53
-rw-r--r--  lib/gitlab/database/query_analyzers/gitlab_schemas_metrics.rb | 46
-rw-r--r--  lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb | 119
-rw-r--r--  lib/gitlab/database/reflection.rb | 115
-rw-r--r--  lib/gitlab/database/reindexing.rb | 41
-rw-r--r--  lib/gitlab/database/reindexing/index_selection.rb | 6
-rw-r--r--  lib/gitlab/database/reindexing/queued_action.rb | 21
-rw-r--r--  lib/gitlab/database/reindexing/reindex_action.rb | 2
-rw-r--r--  lib/gitlab/database/reindexing/reindex_concurrently.rb | 10
-rw-r--r--  lib/gitlab/database/shared_model.rb | 8
-rw-r--r--  lib/gitlab/database/unidirectional_copy_trigger.rb | 2
-rw-r--r--  lib/gitlab/diff/file.rb | 29
-rw-r--r--  lib/gitlab/diff/highlight.rb | 3
-rw-r--r--  lib/gitlab/diff/highlight_cache.rb | 2
-rw-r--r--  lib/gitlab/diff/position_tracer/line_strategy.rb | 8
-rw-r--r--  lib/gitlab/email/handler/service_desk_handler.rb | 36
-rw-r--r--  lib/gitlab/email/message/in_product_marketing.rb | 3
-rw-r--r--  lib/gitlab/email/message/in_product_marketing/admin_verify.rb | 4
-rw-r--r--  lib/gitlab/email/message/in_product_marketing/base.rb | 37
-rw-r--r--  lib/gitlab/email/message/in_product_marketing/create.rb | 4
-rw-r--r--  lib/gitlab/email/message/in_product_marketing/experience.rb | 16
-rw-r--r--  lib/gitlab/email/message/in_product_marketing/helper.rb | 9
-rw-r--r--  lib/gitlab/email/message/in_product_marketing/invite_team.rb | 53
-rw-r--r--  lib/gitlab/email/message/in_product_marketing/verify.rb | 4
-rw-r--r--  lib/gitlab/email/receiver.rb | 8
-rw-r--r--  lib/gitlab/email/reply_parser.rb | 12
-rw-r--r--  lib/gitlab/emoji.rb | 68
-rw-r--r--  lib/gitlab/etag_caching/router/restful.rb | 4
-rw-r--r--  lib/gitlab/git.rb | 1
-rw-r--r--  lib/gitlab/git/blob.rb | 3
-rw-r--r--  lib/gitlab/git/commit.rb | 14
-rw-r--r--  lib/gitlab/git/repository.rb | 24
-rw-r--r--  lib/gitlab/gitaly_client.rb | 25
-rw-r--r--  lib/gitlab/gitaly_client/commit_service.rb | 2
-rw-r--r--  lib/gitlab/gitaly_client/ref_service.rb | 25
-rw-r--r--  lib/gitlab/github_import/bulk_importing.rb | 2
-rw-r--r--  lib/gitlab/github_import/importer/diff_note_importer.rb | 138
-rw-r--r--  lib/gitlab/github_import/importer/issue_importer.rb | 2
-rw-r--r--  lib/gitlab/github_import/importer/label_links_importer.rb | 2
-rw-r--r--  lib/gitlab/github_import/importer/note_importer.rb | 2
-rw-r--r--  lib/gitlab/github_import/importer/pull_requests_merged_by_importer.rb | 15
-rw-r--r--  lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb | 2
-rw-r--r--  lib/gitlab/github_import/representation/diff_note.rb | 104
-rw-r--r--  lib/gitlab/github_import/representation/diff_notes/suggestion_formatter.rb | 34
-rw-r--r--  lib/gitlab/gon_helper.rb | 4
-rw-r--r--  lib/gitlab/gpg/commit.rb | 32
-rw-r--r--  lib/gitlab/grape_logging/loggers/urgency_logger.rb | 19
-rw-r--r--  lib/gitlab/graphql/known_operations.rb | 45
-rw-r--r--  lib/gitlab/graphql/loaders/full_path_model_loader.rb | 5
-rw-r--r--  lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb | 2
-rw-r--r--  lib/gitlab/graphql/query_analyzers/logger_analyzer.rb | 50
-rw-r--r--  lib/gitlab/graphql/tracers/application_context_tracer.rb | 40
-rw-r--r--  lib/gitlab/graphql/tracers/logger_tracer.rb | 58
-rw-r--r--  lib/gitlab/graphql/tracers/metrics_tracer.rb | 48
-rw-r--r--  lib/gitlab/graphql/tracers/timer_tracer.rb | 31
-rw-r--r--  lib/gitlab/graphql/variables.rb | 7
-rw-r--r--  lib/gitlab/health_checks/metric.rb | 3
-rw-r--r--  lib/gitlab/health_checks/probes/status.rb | 3
-rw-r--r--  lib/gitlab/health_checks/redis/cache_check.rb | 26
-rw-r--r--  lib/gitlab/health_checks/redis/queues_check.rb | 26
-rw-r--r--  lib/gitlab/health_checks/redis/rate_limiting_check.rb | 26
-rw-r--r--  lib/gitlab/health_checks/redis/redis_abstract_check.rb | 41
-rw-r--r--  lib/gitlab/health_checks/redis/redis_check.rb | 20
-rw-r--r--  lib/gitlab/health_checks/redis/sessions_check.rb | 26
-rw-r--r--  lib/gitlab/health_checks/redis/shared_state_check.rb | 26
-rw-r--r--  lib/gitlab/health_checks/redis/trace_chunks_check.rb | 26
-rw-r--r--  lib/gitlab/health_checks/result.rb | 3
-rw-r--r--  lib/gitlab/http.rb | 2
-rw-r--r--  lib/gitlab/i18n.rb | 18
-rw-r--r--  lib/gitlab/import/database_helpers.rb | 4
-rw-r--r--  lib/gitlab/import/metrics.rb | 6
-rw-r--r--  lib/gitlab/import_export/attributes_permitter.rb | 2
-rw-r--r--  lib/gitlab/import_export/base/object_builder.rb | 12
-rw-r--r--  lib/gitlab/import_export/decompressed_archive_size_validator.rb | 2
-rw-r--r--  lib/gitlab/import_export/group/relation_tree_restorer.rb | 274
-rw-r--r--  lib/gitlab/import_export/project/import_export.yml | 434
-rw-r--r--  lib/gitlab/import_export/project/object_builder.rb | 40
-rw-r--r--  lib/gitlab/import_export/project/relation_factory.rb | 4
-rw-r--r--  lib/gitlab/import_export/project/relation_tree_restorer.rb | 27
-rw-r--r--  lib/gitlab/import_export/project/sample/relation_tree_restorer.rb | 6
-rw-r--r--  lib/gitlab/import_export/project/tree_saver.rb | 36
-rw-r--r--  lib/gitlab/import_export/relation_tree_restorer.rb | 280
-rw-r--r--  lib/gitlab/instrumentation/redis_interceptor.rb | 15
-rw-r--r--  lib/gitlab/instrumentation/uploads.rb | 32
-rw-r--r--  lib/gitlab/instrumentation_helper.rb | 5
-rw-r--r--  lib/gitlab/integrations/sti_type.rb | 2
-rw-r--r--  lib/gitlab/issues/rebalancing/state.rb | 61
-rw-r--r--  lib/gitlab/jira/http_client.rb | 1
-rw-r--r--  lib/gitlab/language_detection.rb | 2
-rw-r--r--  lib/gitlab/lfs_token.rb | 15
-rw-r--r--  lib/gitlab/lograge/custom_options.rb | 17
-rw-r--r--  lib/gitlab/merge_requests/merge_commit_message.rb | 60
-rw-r--r--  lib/gitlab/metrics.rb | 2
-rw-r--r--  lib/gitlab/metrics/background_transaction.rb | 17
-rw-r--r--  lib/gitlab/metrics/methods.rb | 8
-rw-r--r--  lib/gitlab/metrics/rails_slis.rb | 32
-rw-r--r--  lib/gitlab/metrics/requests_rack_middleware.rb | 18
-rw-r--r--  lib/gitlab/metrics/samplers/action_cable_sampler.rb | 23
-rw-r--r--  lib/gitlab/metrics/subscribers/action_view.rb | 2
-rw-r--r--  lib/gitlab/metrics/subscribers/external_http.rb | 2
-rw-r--r--  lib/gitlab/metrics/subscribers/rails_cache.rb | 2
-rw-r--r--  lib/gitlab/metrics/transaction.rb | 25
-rw-r--r--  lib/gitlab/metrics/web_transaction.rb | 25
-rw-r--r--  lib/gitlab/middleware/compressed_json.rb | 66
-rw-r--r--  lib/gitlab/middleware/go.rb | 2
-rw-r--r--  lib/gitlab/middleware/query_analyzer.rb | 15
-rw-r--r--  lib/gitlab/middleware/release_env.rb | 3
-rw-r--r--  lib/gitlab/pagination/gitaly_keyset_pager.rb | 4
-rw-r--r--  lib/gitlab/patch/sidekiq_client.rb | 22
-rw-r--r--  lib/gitlab/patch/sidekiq_cron_poller.rb | 17
-rw-r--r--  lib/gitlab/project_template.rb | 1
-rw-r--r--  lib/gitlab/prometheus/queries/validate_query.rb | 2
-rw-r--r--  lib/gitlab/prometheus_client.rb | 8
-rw-r--r--  lib/gitlab/quick_actions/issuable_actions.rb | 7
-rw-r--r--  lib/gitlab/quick_actions/issue_actions.rb | 31
-rw-r--r--  lib/gitlab/quick_actions/issue_and_merge_request_actions.rb | 14
-rw-r--r--  lib/gitlab/redis/hll.rb | 3
-rw-r--r--  lib/gitlab/redis/multi_store.rb | 215
-rw-r--r--  lib/gitlab/redis/wrapper.rb | 6
-rw-r--r--  lib/gitlab/runtime.rb | 15
-rw-r--r--  lib/gitlab/saas.rb | 4
-rw-r--r--  lib/gitlab/search_results.rb | 5
-rw-r--r--  lib/gitlab/setup_helper.rb | 7
-rw-r--r--  lib/gitlab/sidekiq_cluster.rb | 171
-rw-r--r--  lib/gitlab/sidekiq_cluster/cli.rb | 230
-rw-r--r--  lib/gitlab/sidekiq_config.rb | 34
-rw-r--r--  lib/gitlab/sidekiq_config/cli_methods.rb | 1
-rw-r--r--  lib/gitlab/sidekiq_config/worker.rb | 7
-rw-r--r--  lib/gitlab/sidekiq_enq.rb | 80
-rw-r--r--  lib/gitlab/sidekiq_logging/deduplication_logger.rb | 10
-rw-r--r--  lib/gitlab/sidekiq_logging/json_formatter.rb | 2
-rw-r--r--  lib/gitlab/sidekiq_middleware.rb | 1
-rw-r--r--  lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb | 54
-rw-r--r--  lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb | 2
-rw-r--r--  lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/base.rb | 4
-rw-r--r--  lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb | 15
-rw-r--r--  lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed.rb | 3
-rw-r--r--  lib/gitlab/sidekiq_middleware/query_analyzer.rb | 11
-rw-r--r--  lib/gitlab/sidekiq_middleware/size_limiter/validator.rb | 39
-rw-r--r--  lib/gitlab/sidekiq_status.rb | 6
-rw-r--r--  lib/gitlab/slash_commands/result.rb | 3
-rw-r--r--  lib/gitlab/spamcheck/client.rb | 18
-rw-r--r--  lib/gitlab/subscription_portal.rb | 8
-rw-r--r--  lib/gitlab/template_parser/ast.rb | 3
-rw-r--r--  lib/gitlab/testing/request_inspector_middleware.rb | 4
-rw-r--r--  lib/gitlab/tracking.rb | 27
-rw-r--r--  lib/gitlab/tracking/destinations/product_analytics.rb | 41
-rw-r--r--  lib/gitlab/tracking/destinations/snowplow.rb | 36
-rw-r--r--  lib/gitlab/tracking/destinations/snowplow_micro.rb | 48
-rw-r--r--  lib/gitlab/tracking/standard_context.rb | 15
-rw-r--r--  lib/gitlab/url_blocker.rb | 16
-rw-r--r--  lib/gitlab/usage/metric.rb | 4
-rw-r--r--  lib/gitlab/usage/metrics/names_suggestions/generator.rb | 8
-rw-r--r--  lib/gitlab/usage_data.rb | 109
-rw-r--r--  lib/gitlab/usage_data_counters/known_events/ci_templates.yml | 20
-rw-r--r--  lib/gitlab/usage_data_counters/known_events/code_review_events.yml | 24
-rw-r--r--  lib/gitlab/usage_data_counters/known_events/quickactions.yml | 4
-rw-r--r--  lib/gitlab/usage_data_counters/vscode_extension_activity_unique_counter.rb (renamed from lib/gitlab/usage_data_counters/vs_code_extension_activity_unique_counter.rb) | 0
-rw-r--r--  lib/gitlab/usage_data_metrics.rb | 12
-rw-r--r--  lib/gitlab/usage_data_non_sql_metrics.rb | 11
-rw-r--r--  lib/gitlab/usage_data_queries.rb | 11
-rw-r--r--  lib/gitlab/utils/usage_data.rb | 9
-rw-r--r--  lib/gitlab/webpack/file_loader.rb | 65
-rw-r--r--  lib/gitlab/webpack/graphql_known_operations.rb | 25
-rw-r--r--  lib/gitlab/webpack/manifest.rb | 50
-rw-r--r--  lib/gitlab/workhorse.rb | 5
-rw-r--r--  lib/gitlab/x509/certificate.rb | 28
-rw-r--r--  lib/gitlab/zentao/client.rb | 19
-rw-r--r--  lib/gitlab/zentao/query.rb | 78
316 files changed, 6510 insertions, 3030 deletions
diff --git a/lib/gitlab/access.rb b/lib/gitlab/access.rb
index d3c96a0f934..3e09d488bc3 100644
--- a/lib/gitlab/access.rb
+++ b/lib/gitlab/access.rb
@@ -74,16 +74,32 @@ module Gitlab
end
def protection_options
- {
- "Not protected: Both developers and maintainers can push new commits and force push." => PROTECTION_NONE,
- "Protected against pushes: Developers cannot push new commits, but are allowed to accept merge requests to the branch. Maintainers can push to the branch." => PROTECTION_DEV_CAN_MERGE,
- "Partially protected: Both developers and maintainers can push new commits, but cannot force push." => PROTECTION_DEV_CAN_PUSH,
- "Fully protected: Developers cannot push new commits, but maintainers can. No one can force push." => PROTECTION_FULL
- }
+ [
+ {
+ label: s_('DefaultBranchProtection|Not protected'),
+ help_text: s_('DefaultBranchProtection|Both developers and maintainers can push new commits, force push, or delete the branch.'),
+ value: PROTECTION_NONE
+ },
+ {
+ label: s_('DefaultBranchProtection|Protected against pushes'),
+ help_text: s_('DefaultBranchProtection|Developers cannot push new commits, but are allowed to accept merge requests to the branch. Maintainers can push to the branch.'),
+ value: PROTECTION_DEV_CAN_MERGE
+ },
+ {
+ label: s_('DefaultBranchProtection|Partially protected'),
+ help_text: s_('DefaultBranchProtection|Both developers and maintainers can push new commits, but cannot force push.'),
+ value: PROTECTION_DEV_CAN_PUSH
+ },
+ {
+ label: s_('DefaultBranchProtection|Fully protected'),
+ help_text: s_('DefaultBranchProtection|Developers cannot push new commits, but maintainers can. No one can force push.'),
+ value: PROTECTION_FULL
+ }
+ ]
end
def protection_values
- protection_options.values
+ protection_options.map { |option| option[:value] }
end
def human_access(access)
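
Note (not part of the commit): the hunk above replaces the label-keyed hash with an array of hashes, so protection_values now maps over :value. A minimal sketch of the new shape, derived only from the diff:

    protection_options.first
    # => {
    #      label:     "Not protected",
    #      help_text: "Both developers and maintainers can push new commits, force push, or delete the branch.",
    #      value:     PROTECTION_NONE
    #    }
    protection_options.map { |option| option[:value] }
    # => [PROTECTION_NONE, PROTECTION_DEV_CAN_MERGE, PROTECTION_DEV_CAN_PUSH, PROTECTION_FULL]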
diff --git a/lib/gitlab/action_cable/config.rb b/lib/gitlab/action_cable/config.rb
index 38e870353eb..77d4ec0733d 100644
--- a/lib/gitlab/action_cable/config.rb
+++ b/lib/gitlab/action_cable/config.rb
@@ -4,10 +4,6 @@ module Gitlab
module ActionCable
class Config
class << self
- def in_app?
- Gitlab::Utils.to_boolean(ENV.fetch('ACTION_CABLE_IN_APP', false))
- end
-
def worker_pool_size
ENV.fetch('ACTION_CABLE_WORKER_POOL_SIZE', 4).to_i
end
diff --git a/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder.rb b/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder.rb
new file mode 100644
index 00000000000..1e50c980a3a
--- /dev/null
+++ b/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ module CycleAnalytics
+ module Aggregated
+ # rubocop: disable CodeReuse/ActiveRecord
+ class BaseQueryBuilder
+ include StageQueryHelpers
+
+ MODEL_CLASSES = {
+ MergeRequest.to_s => ::Analytics::CycleAnalytics::MergeRequestStageEvent,
+ Issue.to_s => ::Analytics::CycleAnalytics::IssueStageEvent
+ }.freeze
+
+ # Allowed params:
+ # * from - stage end date filter start date
+ # * to - stage end date filter to date
+ # * author_username
+ # * milestone_title
+ # * label_name (array)
+ # * assignee_username (array)
+ # * project_ids (array)
+ def initialize(stage:, params: {})
+ @stage = stage
+ @params = params
+ @root_ancestor = stage.parent.root_ancestor
+ @stage_event_model = MODEL_CLASSES.fetch(stage.subject_class.to_s)
+ end
+
+ def build
+ query = base_query
+ query = filter_by_stage_parent(query)
+ query = filter_author(query)
+ query = filter_milestone_ids(query)
+ query = filter_label_names(query)
+ filter_assignees(query)
+ end
+
+ def filter_author(query)
+ return query if params[:author_username].blank?
+
+ user = User.by_username(params[:author_username]).first
+
+ return query.none if user.blank?
+
+ query.authored(user)
+ end
+
+ def filter_milestone_ids(query)
+ return query if params[:milestone_title].blank?
+
+ milestone = MilestonesFinder
+ .new(group_ids: root_ancestor.self_and_descendant_ids, project_ids: root_ancestor.all_projects.select(:id), title: params[:milestone_title])
+ .execute
+ .first
+
+ return query.none if milestone.blank?
+
+ query.with_milestone_id(milestone.id)
+ end
+
+ def filter_label_names(query)
+ return query if params[:label_name].blank?
+
+ LabelFilter.new(
+ stage: stage,
+ params: params,
+ project: nil,
+ group: root_ancestor
+ ).filter(query)
+ end
+
+ def filter_assignees(query)
+ return query if params[:assignee_username].blank?
+
+ Issuables::AssigneeFilter
+ .new(params: { assignee_username: params[:assignee_username] })
+ .filter(query)
+ end
+
+ def filter_by_stage_parent(query)
+ query.by_project_id(stage.parent_id)
+ end
+
+ def base_query
+ query = stage_event_model
+ .by_stage_event_hash_id(stage.stage_event_hash_id)
+
+ from = params[:from] || 30.days.ago
+ if in_progress?
+ query = query
+ .end_event_is_not_happened_yet
+ .opened_state
+ .start_event_timestamp_after(from)
+ query = query.start_event_timestamp_before(params[:to]) if params[:to]
+ else
+ query = query.end_event_timestamp_after(from)
+ query = query.end_event_timestamp_before(params[:to]) if params[:to]
+ end
+
+ query
+ end
+
+ private
+
+ attr_reader :stage, :params, :root_ancestor, :stage_event_model
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
+Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder.prepend_mod_with('Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder')
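
Note (not part of the commit): a rough usage sketch for the query builder above, based on the parameter list documented in the file; the stage object and filter values are placeholders, not taken from the diff:

    builder = Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder.new(
      stage: stage,                        # a group- or project-level stage record (placeholder)
      params: {
        from: 30.days.ago,                 # also the default when :from is omitted
        to: Time.current,
        author_username: 'example-user',   # hypothetical username
        label_name: %w[bug backend],
        assignee_username: %w[example-assignee]
      }
    )
    query = builder.build                  # relation on the Issue/MergeRequest stage event model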
diff --git a/lib/gitlab/analytics/cycle_analytics/aggregated/data_collector.rb b/lib/gitlab/analytics/cycle_analytics/aggregated/data_collector.rb
new file mode 100644
index 00000000000..c8b11ecb4a8
--- /dev/null
+++ b/lib/gitlab/analytics/cycle_analytics/aggregated/data_collector.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ module CycleAnalytics
+ module Aggregated
+ # Arguments:
+ # stage - an instance of CycleAnalytics::ProjectStage or CycleAnalytics::GroupStage
+ # params:
+ # current_user: an instance of User
+ # from: DateTime
+ # to: DateTime
+ class DataCollector
+ include Gitlab::Utils::StrongMemoize
+
+ MAX_COUNT = 10001
+
+ delegate :serialized_records, to: :records_fetcher
+
+ def initialize(stage:, params: {})
+ @stage = stage
+ @params = params
+ end
+
+ def median
+ strong_memoize(:median) { Median.new(stage: stage, query: query, params: params) }
+ end
+
+ def count
+ strong_memoize(:count) { limit_count }
+ end
+
+ def records_fetcher
+ strong_memoize(:records_fetcher) do
+ RecordsFetcher.new(stage: stage, query: query, params: params)
+ end
+ end
+
+ private
+
+ attr_reader :stage, :params
+
+ def query
+ BaseQueryBuilder.new(stage: stage, params: params).build
+ end
+
+ def limit_count
+ query.limit(MAX_COUNT).count
+ end
+ end
+ end
+ end
+ end
+end
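
Sketch (not part of the commit) of how the aggregated collector above is meant to be driven, per the argument comment in the file; stage, user, and dates are placeholders:

    collector = Gitlab::Analytics::CycleAnalytics::Aggregated::DataCollector.new(
      stage: stage,
      params: { current_user: user, from: 14.days.ago, to: Time.current }
    )
    collector.median.days      # median duration in days, or nil when there is no data
    collector.count            # capped at MAX_COUNT (10001)
    collector.records_fetcher.serialized_records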
diff --git a/lib/gitlab/analytics/cycle_analytics/aggregated/label_filter.rb b/lib/gitlab/analytics/cycle_analytics/aggregated/label_filter.rb
new file mode 100644
index 00000000000..6d87ae91a9c
--- /dev/null
+++ b/lib/gitlab/analytics/cycle_analytics/aggregated/label_filter.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ module CycleAnalytics
+ module Aggregated
+ # This class makes it possible to add label filters to stage event tables
+ class LabelFilter < Issuables::LabelFilter
+ extend ::Gitlab::Utils::Override
+
+ def initialize(stage:, project:, group:, **kwargs)
+ @stage = stage
+
+ super(project: project, group: group, **kwargs)
+ end
+
+ private
+
+ attr_reader :stage
+
+ override :label_link_query
+ def label_link_query(target_model, label_ids: nil)
+ join_column = target_model.arel_table[target_model.issuable_id_column]
+
+ LabelLink.by_target_for_exists_query(stage.subject_class.name, join_column, label_ids)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/analytics/cycle_analytics/aggregated/median.rb b/lib/gitlab/analytics/cycle_analytics/aggregated/median.rb
new file mode 100644
index 00000000000..181ee20948b
--- /dev/null
+++ b/lib/gitlab/analytics/cycle_analytics/aggregated/median.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ module CycleAnalytics
+ module Aggregated
+ class Median
+ include StageQueryHelpers
+
+ def initialize(stage:, query:, params:)
+ @stage = stage
+ @query = query
+ @params = params
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def seconds
+ @query = @query.select(median_duration_in_seconds.as('median')).reorder(nil)
+ result = @query.take || {}
+
+ result['median'] || nil
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def days
+ seconds ? seconds.fdiv(1.day) : nil
+ end
+
+ private
+
+ attr_reader :stage, :query, :params
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher.rb b/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher.rb
new file mode 100644
index 00000000000..7dce757cdc8
--- /dev/null
+++ b/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ module CycleAnalytics
+ module Aggregated
+ class RecordsFetcher
+ include Gitlab::Utils::StrongMemoize
+ include StageQueryHelpers
+
+ MAX_RECORDS = 20
+
+ MAPPINGS = {
+ Issue => {
+ serializer_class: AnalyticsIssueSerializer,
+ includes_for_query: { project: { namespace: [:route] }, author: [] },
+ columns_for_select: %I[title iid id created_at author_id project_id]
+ },
+ MergeRequest => {
+ serializer_class: AnalyticsMergeRequestSerializer,
+ includes_for_query: { target_project: [:namespace], author: [] },
+ columns_for_select: %I[title iid id created_at author_id state_id target_project_id]
+ }
+ }.freeze
+
+ def initialize(stage:, query:, params: {})
+ @stage = stage
+ @query = query
+ @params = params
+ @sort = params[:sort] || :end_event
+ @direction = params[:direction] || :desc
+ @page = params[:page] || 1
+ @per_page = MAX_RECORDS
+ @stage_event_model = query.model
+ end
+
+ def serialized_records
+ strong_memoize(:serialized_records) do
+ records = ordered_and_limited_query.select(stage_event_model.arel_table[Arel.star], duration.as('total_time'))
+
+ yield records if block_given?
+ issuables_and_records = load_issuables(records)
+
+ preload_associations(issuables_and_records.map(&:first))
+
+ issuables_and_records.map do |issuable, record|
+ project = issuable.project
+ attributes = issuable.attributes.merge({
+ project_path: project.path,
+ namespace_path: project.namespace.route.path,
+ author: issuable.author,
+ total_time: record.total_time
+ })
+ serializer.represent(attributes)
+ end
+ end
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def ordered_and_limited_query
+ sorting_options = {
+ end_event: {
+ asc: -> { query.order(end_event_timestamp: :asc) },
+ desc: -> { query.order(end_event_timestamp: :desc) }
+ },
+ duration: {
+ asc: -> { query.order(duration.asc) },
+ desc: -> { query.order(duration.desc) }
+ }
+ }
+
+ sort_lambda = sorting_options.dig(sort, direction) || sorting_options.dig(:end_event, :desc)
+
+ sort_lambda.call
+ .page(page)
+ .per(per_page)
+ .without_count
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ private
+
+ attr_reader :stage, :query, :sort, :direction, :params, :page, :per_page, :stage_event_model
+
+ delegate :subject_class, to: :stage
+
+ def load_issuables(stage_event_records)
+ stage_event_records_by_issuable_id = stage_event_records.index_by(&:issuable_id)
+
+ issuable_model = stage_event_model.issuable_model
+ issuables_by_id = issuable_model.id_in(stage_event_records_by_issuable_id.keys).index_by(&:id)
+
+ stage_event_records_by_issuable_id.map do |issuable_id, record|
+ [issuables_by_id[issuable_id], record] if issuables_by_id[issuable_id]
+ end.compact
+ end
+
+ def serializer
+ MAPPINGS.fetch(subject_class).fetch(:serializer_class).new
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def preload_associations(records)
+ ActiveRecord::Associations::Preloader.new.preload(
+ records,
+ MAPPINGS.fetch(subject_class).fetch(:includes_for_query)
+ )
+
+ records
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb b/lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb
new file mode 100644
index 00000000000..f23d1832df9
--- /dev/null
+++ b/lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ module CycleAnalytics
+ module Aggregated
+ module StageQueryHelpers
+ def percentile_cont
+ percentile_cont_ordering = Arel::Nodes::UnaryOperation.new(Arel::Nodes::SqlLiteral.new('ORDER BY'), duration)
+ Arel::Nodes::NamedFunction.new(
+ 'percentile_cont(0.5) WITHIN GROUP',
+ [percentile_cont_ordering]
+ )
+ end
+
+ def duration
+ if in_progress?
+ Arel::Nodes::Subtraction.new(
+ Arel::Nodes::NamedFunction.new('TO_TIMESTAMP', [Time.current.to_i]),
+ query.model.arel_table[:start_event_timestamp]
+ )
+ else
+ Arel::Nodes::Subtraction.new(
+ query.model.arel_table[:end_event_timestamp],
+ query.model.arel_table[:start_event_timestamp]
+ )
+ end
+ end
+
+ def median_duration_in_seconds
+ Arel::Nodes::Extract.new(percentile_cont, :epoch)
+ end
+
+ def in_progress?
+ params[:end_event_filter] == :in_progress
+ end
+ end
+ end
+ end
+ end
+end
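
Note (not part of the commit): the Arel built in StageQueryHelpers renders to a PostgreSQL median expression; the SQL below is an illustrative sketch for the finished-items branch, not copied from the codebase:

    # Roughly:
    #   EXTRACT(epoch FROM
    #     percentile_cont(0.5) WITHIN GROUP (ORDER BY end_event_timestamp - start_event_timestamp))
    Median.new(stage: stage, query: query, params: params).seconds
    # => median stage duration in seconds, or nil when the query matches nothing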
diff --git a/lib/gitlab/analytics/cycle_analytics/data_collector.rb b/lib/gitlab/analytics/cycle_analytics/data_collector.rb
index 56179533ffb..a20481dd39e 100644
--- a/lib/gitlab/analytics/cycle_analytics/data_collector.rb
+++ b/lib/gitlab/analytics/cycle_analytics/data_collector.rb
@@ -23,13 +23,21 @@ module Gitlab
def records_fetcher
strong_memoize(:records_fetcher) do
- RecordsFetcher.new(stage: stage, query: query, params: params)
+ if use_aggregated_data_collector?
+ aggregated_data_collector.records_fetcher
+ else
+ RecordsFetcher.new(stage: stage, query: query, params: params)
+ end
end
end
def median
strong_memoize(:median) do
- Median.new(stage: stage, query: query, params: params)
+ if use_aggregated_data_collector?
+ aggregated_data_collector.median
+ else
+ Median.new(stage: stage, query: query, params: params)
+ end
end
end
@@ -41,7 +49,11 @@ module Gitlab
def count
strong_memoize(:count) do
- limit_count
+ if use_aggregated_data_collector?
+ aggregated_data_collector.count
+ else
+ limit_count
+ end
end
end
@@ -59,6 +71,14 @@ module Gitlab
def limit_count
query.limit(MAX_COUNT).count
end
+
+ def aggregated_data_collector
+ @aggregated_data_collector ||= Aggregated::DataCollector.new(stage: stage, params: params)
+ end
+
+ def use_aggregated_data_collector?
+ params.fetch(:use_aggregated_data_collector, false)
+ end
end
end
end
diff --git a/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb b/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
index f94696e3186..140c4a300ca 100644
--- a/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
+++ b/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
@@ -8,23 +8,11 @@ module Gitlab
include StageQueryHelpers
include Gitlab::CycleAnalytics::MetricsTables
- MAX_RECORDS = 20
-
- MAPPINGS = {
- Issue => {
- serializer_class: AnalyticsIssueSerializer,
- includes_for_query: { project: { namespace: [:route] }, author: [] },
- columns_for_select: %I[title iid id created_at author_id project_id]
- },
- MergeRequest => {
- serializer_class: AnalyticsMergeRequestSerializer,
- includes_for_query: { target_project: [:namespace], author: [] },
- columns_for_select: %I[title iid id created_at author_id state_id target_project_id]
- }
- }.freeze
-
delegate :subject_class, to: :stage
+ MAX_RECORDS = Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher::MAX_RECORDS
+ MAPPINGS = Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher::MAPPINGS
+
def initialize(stage:, query:, params: {})
@stage = stage
@query = query
diff --git a/lib/gitlab/analytics/cycle_analytics/request_params.rb b/lib/gitlab/analytics/cycle_analytics/request_params.rb
index 94e20762368..bc9d94ef09c 100644
--- a/lib/gitlab/analytics/cycle_analytics/request_params.rb
+++ b/lib/gitlab/analytics/cycle_analytics/request_params.rb
@@ -79,7 +79,8 @@ module Gitlab
sort: sort&.to_sym,
direction: direction&.to_sym,
page: page,
- end_event_filter: end_event_filter.to_sym
+ end_event_filter: end_event_filter.to_sym,
+ use_aggregated_data_collector: Feature.enabled?(:use_vsa_aggregated_tables, group || project, default_enabled: :yaml)
}.merge(attributes.symbolize_keys.slice(*FINDER_PARAM_NAMES))
end
diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb
index 7c37f67b766..3db2f1295f9 100644
--- a/lib/gitlab/application_rate_limiter.rb
+++ b/lib/gitlab/application_rate_limiter.rb
@@ -11,6 +11,8 @@ module Gitlab
# redirect_to(edit_project_path(@project), status: :too_many_requests)
# end
class ApplicationRateLimiter
+ InvalidKeyError = Class.new(StandardError)
+
def initialize(key, **options)
@key = key
@options = options
@@ -64,39 +66,43 @@ module Gitlab
# @param key [Symbol] Key attribute registered in `.rate_limits`
# @option scope [Array<ActiveRecord>] Array of ActiveRecord models to scope throttling to a specific request (e.g. per user per project)
# @option threshold [Integer] Optional threshold value to override default one registered in `.rate_limits`
- # @option interval [Integer] Optional interval value to override default one registered in `.rate_limits`
# @option users_allowlist [Array<String>] Optional list of usernames to exclude from the limit. This param will only be functional if Scope includes a current user.
#
# @return [Boolean] Whether or not a request should be throttled
def throttled?(key, **options)
- return unless rate_limits[key]
+ raise InvalidKeyError unless rate_limits[key]
return if scoped_user_in_allowlist?(options)
threshold_value = options[:threshold] || threshold(key)
threshold_value > 0 &&
- increment(key, options[:scope], options[:interval]) > threshold_value
+ increment(key, options[:scope]) > threshold_value
end
- # Increments the given cache key and increments the value by 1 with the
- # expiration interval defined in `.rate_limits`.
+ # Increments a cache key that is based on the current time and interval.
+ # So that when time passes to the next interval, the key changes and the count starts again from 0.
+ #
+ # Based on https://github.com/rack/rack-attack/blob/886ba3a18d13c6484cd511a4dc9b76c0d14e5e96/lib/rack/attack/cache.rb#L63-L68
#
# @param key [Symbol] Key attribute registered in `.rate_limits`
# @option scope [Array<ActiveRecord>] Array of ActiveRecord models to scope throttling to a specific request (e.g. per user per project)
- # @option interval [Integer] Optional interval value to override default one registered in `.rate_limits`
#
# @return [Integer] incremented value
- def increment(key, scope, interval = nil)
- value = 0
- interval_value = interval || interval(key)
+ def increment(key, scope)
+ interval_value = interval(key)
+
+ period_key, time_elapsed_in_period = Time.now.to_i.divmod(interval_value)
+
+ cache_key = "#{action_key(key, scope)}:#{period_key}"
+ # We add a 1 second buffer to avoid timing issues when we're at the end of a period
+ expiry = interval_value - time_elapsed_in_period + 1
::Gitlab::Redis::RateLimiting.with do |redis|
- cache_key = action_key(key, scope)
- value = redis.incr(cache_key)
- redis.expire(cache_key, interval_value) if value == 1
+ redis.pipelined do
+ redis.incr(cache_key)
+ redis.expire(cache_key, expiry)
+ end.first
end
-
- value
end
# Logs request using provided logger
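
Note (not part of the commit): the new increment implementation is a fixed-window counter keyed by the current interval. A simplified standalone sketch of that bucketing, with an assumed 60-second interval and a hypothetical cache key; the production code derives both from `.rate_limits`:

    interval  = 60                                      # assumed window length in seconds
    threshold = 5                                       # assumed allowed requests per window
    period_key, elapsed = Time.now.to_i.divmod(interval)
    cache_key = "application_rate_limiter:example:#{period_key}"  # hypothetical action key
    expiry    = interval - elapsed + 1                  # 1 second buffer at the window boundary

    count = Gitlab::Redis::RateLimiting.with do |redis|
      redis.pipelined do
        redis.incr(cache_key)
        redis.expire(cache_key, expiry)
      end.first                                         # value returned by INCR
    end
    throttled = count > threshold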
diff --git a/lib/gitlab/asciidoc/syntax_highlighter/html_pipeline_adapter.rb b/lib/gitlab/asciidoc/syntax_highlighter/html_pipeline_adapter.rb
index 5fc3323f0fd..6dbe6f691f6 100644
--- a/lib/gitlab/asciidoc/syntax_highlighter/html_pipeline_adapter.rb
+++ b/lib/gitlab/asciidoc/syntax_highlighter/html_pipeline_adapter.rb
@@ -7,7 +7,11 @@ module Gitlab
register_for 'gitlab-html-pipeline'
def format(node, lang, opts)
- %(<pre><code #{lang ? %[ lang="#{lang}"] : ''}>#{node.content}</code></pre>)
+ if Feature.enabled?(:use_cmark_renderer)
+ %(<pre #{lang ? %[lang="#{lang}"] : ''}><code>#{node.content}</code></pre>)
+ else
+ %(<pre><code #{lang ? %[ lang="#{lang}"] : ''}>#{node.content}</code></pre>)
+ end
end
end
end
diff --git a/lib/gitlab/auth/o_auth/provider.rb b/lib/gitlab/auth/o_auth/provider.rb
index ab6ac815601..41a8739b0b6 100644
--- a/lib/gitlab/auth/o_auth/provider.rb
+++ b/lib/gitlab/auth/o_auth/provider.rb
@@ -5,6 +5,7 @@ module Gitlab
module OAuth
class Provider
LABELS = {
+ "dingtalk" => "DingTalk",
"github" => "GitHub",
"gitlab" => "GitLab.com",
"google_oauth2" => "Google",
diff --git a/lib/gitlab/background_migration.rb b/lib/gitlab/background_migration.rb
index 0826887dd0a..22b4b685f81 100644
--- a/lib/gitlab/background_migration.rb
+++ b/lib/gitlab/background_migration.rb
@@ -2,8 +2,12 @@
module Gitlab
module BackgroundMigration
- def self.queue
- @queue ||= BackgroundMigrationWorker.sidekiq_options['queue']
+ def self.coordinator_for_database(database)
+ JobCoordinator.for_database(database)
+ end
+
+ def self.queue(database: :main)
+ coordinator_for_database(database).queue
end
# Begins stealing jobs from the background migrations queue, blocking the
@@ -16,35 +20,10 @@ module Gitlab
# re-raises the exception.
#
# steal_class - The name of the class for which to steal jobs.
- def self.steal(steal_class, retry_dead_jobs: false)
- queues = [
- Sidekiq::ScheduledSet.new,
- Sidekiq::Queue.new(self.queue)
- ]
-
- if retry_dead_jobs
- queues << Sidekiq::RetrySet.new
- queues << Sidekiq::DeadSet.new
- end
-
- queues.each do |queue|
- queue.each do |job|
- migration_class, migration_args = job.args
-
- next unless job.klass == 'BackgroundMigrationWorker'
- next unless migration_class == steal_class
- next if block_given? && !(yield job)
-
- begin
- perform(migration_class, migration_args) if job.delete
- rescue Exception # rubocop:disable Lint/RescueException
- BackgroundMigrationWorker # enqueue this migration again
- .perform_async(migration_class, migration_args)
-
- raise
- end
- end
- end
+ # retry_dead_jobs - Flag to control whether jobs in Sidekiq::RetrySet or Sidekiq::DeadSet are retried.
+ # database - tracking database this migration executes against
+ def self.steal(steal_class, retry_dead_jobs: false, database: :main, &block)
+ coordinator_for_database(database).steal(steal_class, retry_dead_jobs: retry_dead_jobs, &block)
end
##
@@ -55,64 +34,17 @@ module Gitlab
#
# arguments - The arguments to pass to the background migration's "perform"
# method.
- def self.perform(class_name, arguments)
- migration_class_for(class_name).new.perform(*arguments)
- end
-
- def self.remaining
- enqueued = Sidekiq::Queue.new(self.queue)
- scheduled = Sidekiq::ScheduledSet.new
-
- [enqueued, scheduled].sum do |set|
- set.count do |job|
- job.klass == 'BackgroundMigrationWorker'
- end
- end
- end
-
- def self.exists?(migration_class, additional_queues = [])
- enqueued = Sidekiq::Queue.new(self.queue)
- scheduled = Sidekiq::ScheduledSet.new
-
- enqueued_job?([enqueued, scheduled], migration_class)
- end
-
- def self.dead_jobs?(migration_class)
- dead_set = Sidekiq::DeadSet.new
-
- enqueued_job?([dead_set], migration_class)
+ # database - tracking database this migration executes against
+ def self.perform(class_name, arguments, database: :main)
+ coordinator_for_database(database).perform(class_name, arguments)
end
- def self.retrying_jobs?(migration_class)
- retry_set = Sidekiq::RetrySet.new
-
- enqueued_job?([retry_set], migration_class)
- end
-
- def self.migration_class_for(class_name)
- # We don't pass class name with Gitlab::BackgroundMigration:: prefix anymore
- # but some jobs could be already spawned so we need to have some backward compatibility period.
- # Can be removed since 13.x
- full_class_name_prefix_regexp = /\A(::)?Gitlab::BackgroundMigration::/
-
- if class_name.match(full_class_name_prefix_regexp)
- Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
- StandardError.new("Full class name is used"),
- class_name: class_name
- )
-
- class_name = class_name.sub(full_class_name_prefix_regexp, '')
- end
-
- const_get(class_name, false)
+ def self.exists?(migration_class, additional_queues = [], database: :main)
+ coordinator_for_database(database).exists?(migration_class, additional_queues) # rubocop:disable CodeReuse/ActiveRecord
end
- def self.enqueued_job?(queues, migration_class)
- queues.any? do |queue|
- queue.any? do |job|
- job.klass == 'BackgroundMigrationWorker' && job.args.first == migration_class
- end
- end
+ def self.remaining(database: :main)
+ coordinator_for_database(database).remaining
end
end
end
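
With the delegation to JobCoordinator above, callers can name the tracking database explicitly; :main is the only valid value at this point. A brief sketch (the migration class and arguments are placeholders, not part of the commit):

    Gitlab::BackgroundMigration.steal('ExampleMigration', database: :main)
    Gitlab::BackgroundMigration.perform('ExampleMigration', [1, 1000], database: :main)
    Gitlab::BackgroundMigration.remaining(database: :main)   # jobs still enqueued or scheduled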
diff --git a/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed.rb b/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed.rb
new file mode 100644
index 00000000000..b39c0953fb1
--- /dev/null
+++ b/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Add user primary email to emails table if confirmed
+ class AddPrimaryEmailToEmailsIfUserConfirmed
+ INNER_BATCH_SIZE = 1_000
+
+ # Stubbed class to access the User table
+ class User < ActiveRecord::Base
+ include ::EachBatch
+
+ self.table_name = 'users'
+ self.inheritance_column = :_type_disabled
+
+ scope :confirmed, -> { where.not(confirmed_at: nil) }
+
+ has_many :emails
+ end
+
+ # Stubbed class to access the Emails table
+ class Email < ActiveRecord::Base
+ self.table_name = 'emails'
+ self.inheritance_column = :_type_disabled
+
+ belongs_to :user
+ end
+
+ def perform(start_id, end_id)
+ User.confirmed.where(id: start_id..end_id).select(:id, :email, :confirmed_at).each_batch(of: INNER_BATCH_SIZE) do |users|
+ current_time = Time.now.utc
+
+ attributes = users.map do |user|
+ {
+ user_id: user.id,
+ email: user.email,
+ confirmed_at: user.confirmed_at,
+ created_at: current_time,
+ updated_at: current_time
+ }
+ end
+
+ Email.insert_all(attributes)
+ end
+ mark_job_as_succeeded(start_id, end_id)
+ end
+
+ private
+
+ def mark_job_as_succeeded(*arguments)
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ 'AddPrimaryEmailToEmailsIfUserConfirmed',
+ arguments
+ )
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/backfill_design_internal_ids.rb b/lib/gitlab/background_migration/backfill_design_internal_ids.rb
deleted file mode 100644
index 236c6b6eb9a..00000000000
--- a/lib/gitlab/background_migration/backfill_design_internal_ids.rb
+++ /dev/null
@@ -1,130 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # Backfill design.iid for a range of projects
- class BackfillDesignInternalIds
- # See app/models/internal_id
- # This is a direct copy of the application code with the following changes:
- # - usage enum is hard-coded to the value for design_management_designs
- # - init is not passed around, but ignored
- class InternalId < ActiveRecord::Base
- def self.track_greatest(subject, scope, new_value)
- InternalIdGenerator.new(subject, scope).track_greatest(new_value)
- end
-
- # Increments #last_value with new_value if it is greater than the current,
- # and saves the record
- #
- # The operation locks the record and gathers a `ROW SHARE` lock (in PostgreSQL).
- # As such, the increment is atomic and safe to be called concurrently.
- def track_greatest_and_save!(new_value)
- update_and_save { self.last_value = [last_value || 0, new_value].max }
- end
-
- private
-
- def update_and_save(&block)
- lock!
- yield
- # update_and_save_counter.increment(usage: usage, changed: last_value_changed?)
- save!
- last_value
- end
- end
-
- # See app/models/internal_id
- class InternalIdGenerator
- attr_reader :subject, :scope, :scope_attrs
-
- def initialize(subject, scope)
- @subject = subject
- @scope = scope
-
- raise ArgumentError, 'Scope is not well-defined, need at least one column for scope (given: 0)' if scope.empty?
- end
-
- # Create a record in internal_ids if one does not yet exist
- # and set its new_value if it is higher than the current last_value
- #
- # Note this will acquire a ROW SHARE lock on the InternalId record
- def track_greatest(new_value)
- subject.transaction do
- record.track_greatest_and_save!(new_value)
- end
- end
-
- def record
- @record ||= (lookup || create_record)
- end
-
- def lookup
- InternalId.find_by(**scope, usage: usage_value)
- end
-
- def usage_value
- 10 # see Enums::InternalId - this is the value for design_management_designs
- end
-
- # Create InternalId record for (scope, usage) combination, if it doesn't exist
- #
- # We blindly insert without synchronization. If another process
- # was faster in doing this, we'll realize once we hit the unique key constraint
- # violation. We can safely roll-back the nested transaction and perform
- # a lookup instead to retrieve the record.
- def create_record
- subject.transaction(requires_new: true) do # rubocop:disable Performance/ActiveRecordSubtransactions
- InternalId.create!(
- **scope,
- usage: usage_value,
- last_value: 0
- )
- end
- rescue ActiveRecord::RecordNotUnique
- lookup
- end
- end
-
- attr_reader :design_class
-
- def initialize(design_class)
- @design_class = design_class
- end
-
- def perform(relation)
- start_id, end_id = relation.pluck("min(project_id), max(project_id)").flatten
- table = 'design_management_designs'
-
- ActiveRecord::Base.connection.execute <<~SQL
- WITH
- starting_iids(project_id, iid) as #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}(
- SELECT project_id, MAX(COALESCE(iid, 0))
- FROM #{table}
- WHERE project_id BETWEEN #{start_id} AND #{end_id}
- GROUP BY project_id
- ),
- with_calculated_iid(id, iid) as #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}(
- SELECT design.id,
- init.iid + ROW_NUMBER() OVER (PARTITION BY design.project_id ORDER BY design.id ASC)
- FROM #{table} as design, starting_iids as init
- WHERE design.project_id BETWEEN #{start_id} AND #{end_id}
- AND design.iid IS NULL
- AND init.project_id = design.project_id
- )
-
- UPDATE #{table}
- SET iid = with_calculated_iid.iid
- FROM with_calculated_iid
- WHERE #{table}.id = with_calculated_iid.id
- SQL
-
- # track the new greatest IID value
- relation.each do |design|
- current_max = design_class.where(project_id: design.project_id).maximum(:iid)
- scope = { project_id: design.project_id }
- InternalId.track_greatest(design, scope, current_max)
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/backfill_project_repositories.rb b/lib/gitlab/background_migration/backfill_project_repositories.rb
index a9eaeb0562d..05e2ed72fb3 100644
--- a/lib/gitlab/background_migration/backfill_project_repositories.rb
+++ b/lib/gitlab/background_migration/backfill_project_repositories.rb
@@ -189,7 +189,7 @@ module Gitlab
end
def perform(start_id, stop_id)
- Gitlab::Database.main.bulk_insert(:project_repositories, project_repositories(start_id, stop_id)) # rubocop:disable Gitlab/BulkInsert
+ ApplicationRecord.legacy_bulk_insert(:project_repositories, project_repositories(start_id, stop_id)) # rubocop:disable Gitlab/BulkInsert
end
private
diff --git a/lib/gitlab/background_migration/backfill_user_namespace.rb b/lib/gitlab/background_migration/backfill_user_namespace.rb
new file mode 100644
index 00000000000..f55eaa3b14e
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_user_namespace.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfills the `namespaces.type` column, replacing any
+ # instances of `NULL` with `User`
+ class BackfillUserNamespace
+ include Gitlab::Database::DynamicModelHelpers
+
+ def perform(start_id, end_id, batch_table, batch_column, sub_batch_size, pause_ms)
+ parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id)
+ parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size, order_hint: :type) do |sub_batch|
+ batch_metrics.time_operation(:update_all) do
+ sub_batch.update_all(type: 'User')
+ end
+ pause_ms = 0 if pause_ms < 0
+ sleep(pause_ms * 0.001)
+ end
+ end
+
+ def batch_metrics
+ @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
+ end
+
+ private
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+
+ def relation_scoped_to_range(source_table, source_key_column, start_id, stop_id)
+ define_batchable_model(source_table)
+ .where(source_key_column => start_id..stop_id)
+ .where(type: nil)
+ end
+ end
+ end
+end
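
Note (not part of the commit): the backfill above is driven with explicit batching arguments; an illustrative invocation, with a placeholder ID range and batch sizes:

    Gitlab::BackgroundMigration::BackfillUserNamespace.new.perform(
      1, 10_000,       # start_id, end_id
      'namespaces',    # batch_table
      'id',            # batch_column
      100,             # sub_batch_size
      10               # pause_ms between sub-batches
    )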
diff --git a/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics.rb b/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics.rb
deleted file mode 100644
index 691bdb457d7..00000000000
--- a/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class CopyMergeRequestTargetProjectToMergeRequestMetrics
- extend ::Gitlab::Utils::Override
-
- def perform(start_id, stop_id)
- ActiveRecord::Base.connection.execute <<~SQL
- WITH merge_requests_batch AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
- SELECT id, target_project_id
- FROM merge_requests WHERE id BETWEEN #{Integer(start_id)} AND #{Integer(stop_id)}
- )
- UPDATE
- merge_request_metrics
- SET
- target_project_id = merge_requests_batch.target_project_id
- FROM merge_requests_batch
- WHERE merge_request_metrics.merge_request_id=merge_requests_batch.id
- SQL
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/fix_merge_request_diff_commit_users.rb b/lib/gitlab/background_migration/fix_merge_request_diff_commit_users.rb
new file mode 100644
index 00000000000..ea3e56cb14a
--- /dev/null
+++ b/lib/gitlab/background_migration/fix_merge_request_diff_commit_users.rb
@@ -0,0 +1,156 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Background migration for fixing merge_request_diff_commit rows that don't
+ # have committer/author details due to
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/344080.
+ #
+ # This migration acts on a single project and corrects its data. Because
+ # this process needs Git/Gitaly access, and duplicating all that code is far
+ # too much, this migration relies on global models such as Project,
+ # MergeRequest, etc.
+ # rubocop: disable Metrics/ClassLength
+ class FixMergeRequestDiffCommitUsers
+ BATCH_SIZE = 100
+
+ def initialize
+ @commits = {}
+ @users = {}
+ end
+
+ def perform(project_id)
+ if (project = ::Project.find_by_id(project_id))
+ process(project)
+ end
+
+ ::Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ 'FixMergeRequestDiffCommitUsers',
+ [project_id]
+ )
+
+ schedule_next_job
+ end
+
+ def process(project)
+ # Loading everything using one big query may result in timeouts (e.g.
+ # for projects the size of gitlab-org/gitlab). So instead we query
+ # data on a per merge request basis.
+ project.merge_requests.each_batch(column: :iid) do |mrs|
+ mrs.ids.each do |mr_id|
+ each_row_to_check(mr_id) do |commit|
+ update_commit(project, commit)
+ end
+ end
+ end
+ end
+
+ def each_row_to_check(merge_request_id, &block)
+ columns = %w[merge_request_diff_id relative_order].map do |col|
+ Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: col,
+ order_expression: MergeRequestDiffCommit.arel_table[col.to_sym].asc,
+ nullable: :not_nullable,
+ distinct: false
+ )
+ end
+
+ order = Pagination::Keyset::Order.build(columns)
+ scope = MergeRequestDiffCommit
+ .joins(:merge_request_diff)
+ .where(merge_request_diffs: { merge_request_id: merge_request_id })
+ .where('commit_author_id IS NULL OR committer_id IS NULL')
+ .order(order)
+
+ Pagination::Keyset::Iterator
+ .new(scope: scope, use_union_optimization: true)
+ .each_batch(of: BATCH_SIZE) do |rows|
+ rows
+ .select([
+ :merge_request_diff_id,
+ :relative_order,
+ :sha,
+ :committer_id,
+ :commit_author_id
+ ])
+ .each(&block)
+ end
+ end
+
+ # rubocop: disable Metrics/AbcSize
+ def update_commit(project, row)
+ commit = find_commit(project, row.sha)
+ updates = []
+
+ unless row.commit_author_id
+ author_id = find_or_create_user(commit, :author_name, :author_email)
+
+ updates << [arel_table[:commit_author_id], author_id] if author_id
+ end
+
+ unless row.committer_id
+ committer_id =
+ find_or_create_user(commit, :committer_name, :committer_email)
+
+ updates << [arel_table[:committer_id], committer_id] if committer_id
+ end
+
+ return if updates.empty?
+
+ update = Arel::UpdateManager
+ .new
+ .table(MergeRequestDiffCommit.arel_table)
+ .where(matches_row(row))
+ .set(updates)
+ .to_sql
+
+ MergeRequestDiffCommit.connection.execute(update)
+ end
+ # rubocop: enable Metrics/AbcSize
+
+ def schedule_next_job
+ job = Database::BackgroundMigrationJob
+ .for_migration_class('FixMergeRequestDiffCommitUsers')
+ .pending
+ .first
+
+ return unless job
+
+ BackgroundMigrationWorker.perform_in(
+ 2.minutes,
+ 'FixMergeRequestDiffCommitUsers',
+ job.arguments
+ )
+ end
+
+ def find_commit(project, sha)
+ @commits[sha] ||= (project.commit(sha)&.to_hash || {})
+ end
+
+ def find_or_create_user(commit, name_field, email_field)
+ name = commit[name_field]
+ email = commit[email_field]
+
+ return unless name && email
+
+ @users[[name, email]] ||=
+ MergeRequest::DiffCommitUser.find_or_create(name, email).id
+ end
+
+ def matches_row(row)
+ primary_key = Arel::Nodes::Grouping
+ .new([arel_table[:merge_request_diff_id], arel_table[:relative_order]])
+
+ primary_val = Arel::Nodes::Grouping
+ .new([row.merge_request_diff_id, row.relative_order])
+
+ primary_key.eq(primary_val)
+ end
+
+ def arel_table
+ MergeRequestDiffCommit.arel_table
+ end
+ end
+ # rubocop: enable Metrics/ClassLength
+ end
+end
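
A minimal standalone sketch of the keyset-style batching that each_row_to_check relies on above: rows are walked in order of the composite key (merge_request_diff_id, relative_order), and each batch resumes strictly after the last key already seen instead of paging with OFFSET. This is plain illustrative Ruby, not GitLab code.

  Row = Struct.new(:diff_id, :relative_order, :sha)

  # Walk rows in batches, keeping a cursor on the composite key so each batch
  # starts strictly after the last (diff_id, relative_order) pair yielded.
  def each_keyset_batch(rows, of: 100)
    sorted = rows.sort_by { |r| [r.diff_id, r.relative_order] }
    cursor = nil
    loop do
      batch = sorted.select { |r| cursor.nil? || ([r.diff_id, r.relative_order] <=> cursor).positive? }.first(of)
      break if batch.empty?

      yield batch
      cursor = [batch.last.diff_id, batch.last.relative_order]
    end
  end

  rows = [Row.new(2, 0, 'a'), Row.new(1, 1, 'b'), Row.new(1, 0, 'c')]
  each_keyset_batch(rows, of: 2) { |batch| p batch.map(&:sha) }  # prints ["c", "b"] then ["a"]
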
diff --git a/lib/gitlab/background_migration/fix_orphan_promoted_issues.rb b/lib/gitlab/background_migration/fix_orphan_promoted_issues.rb
deleted file mode 100644
index c50bf430d92..00000000000
--- a/lib/gitlab/background_migration/fix_orphan_promoted_issues.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # No OP for CE
- class FixOrphanPromotedIssues
- def perform(note_id)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::FixOrphanPromotedIssues.prepend_mod_with('Gitlab::BackgroundMigration::FixOrphanPromotedIssues')
diff --git a/lib/gitlab/background_migration/fix_ruby_object_in_audit_events.rb b/lib/gitlab/background_migration/fix_ruby_object_in_audit_events.rb
deleted file mode 100644
index 47a68c61fcc..00000000000
--- a/lib/gitlab/background_migration/fix_ruby_object_in_audit_events.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # Remove serialized Ruby object in audit_events
- class FixRubyObjectInAuditEvents
- def perform(start_id, stop_id)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::FixRubyObjectInAuditEvents.prepend_mod_with('Gitlab::BackgroundMigration::FixRubyObjectInAuditEvents')
diff --git a/lib/gitlab/background_migration/job_coordinator.rb b/lib/gitlab/background_migration/job_coordinator.rb
new file mode 100644
index 00000000000..1c8819eaa62
--- /dev/null
+++ b/lib/gitlab/background_migration/job_coordinator.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Class responsible for executing background migrations based on the given database.
+ #
+ # Chooses the correct worker class when selecting jobs from the queue based on the
+ # convention of how the queues and worker classes are setup for each database.
+ #
+ # Also provides a database connection to the correct tracking database.
+ class JobCoordinator
+ VALID_DATABASES = %i[main].freeze
+ WORKER_CLASS_NAME = 'BackgroundMigrationWorker'
+
+ def self.for_database(database)
+ database = database.to_sym
+
+ unless VALID_DATABASES.include?(database)
+ raise ArgumentError, "database must be one of [#{VALID_DATABASES.join(', ')}], got '#{database}'"
+ end
+
+ namespace = database.to_s.capitalize unless database == :main
+ namespaced_worker_class = [namespace, WORKER_CLASS_NAME].compact.join('::')
+
+ new(database, "::#{namespaced_worker_class}".constantize)
+ end
+
+ attr_reader :database, :worker_class
+
+ def queue
+ @queue ||= worker_class.sidekiq_options['queue']
+ end
+
+ def with_shared_connection(&block)
+ Gitlab::Database::SharedModel.using_connection(connection, &block)
+ end
+
+ def steal(steal_class, retry_dead_jobs: false)
+ with_shared_connection do
+ queues = [
+ Sidekiq::ScheduledSet.new,
+ Sidekiq::Queue.new(self.queue)
+ ]
+
+ if retry_dead_jobs
+ queues << Sidekiq::RetrySet.new
+ queues << Sidekiq::DeadSet.new
+ end
+
+ queues.each do |queue|
+ queue.each do |job|
+ migration_class, migration_args = job.args
+
+ next unless job.klass == worker_class.name
+ next unless migration_class == steal_class
+ next if block_given? && !(yield job)
+
+ begin
+ perform(migration_class, migration_args) if job.delete
+ rescue Exception # rubocop:disable Lint/RescueException
+ worker_class # enqueue this migration again
+ .perform_async(migration_class, migration_args)
+
+ raise
+ end
+ end
+ end
+ end
+ end
+
+ def perform(class_name, arguments)
+ with_shared_connection do
+ migration_class_for(class_name).new.perform(*arguments)
+ end
+ end
+
+ def remaining
+ enqueued = Sidekiq::Queue.new(self.queue)
+ scheduled = Sidekiq::ScheduledSet.new
+
+ [enqueued, scheduled].sum do |set|
+ set.count do |job|
+ job.klass == worker_class.name
+ end
+ end
+ end
+
+ def exists?(migration_class, additional_queues = [])
+ enqueued = Sidekiq::Queue.new(self.queue)
+ scheduled = Sidekiq::ScheduledSet.new
+
+ enqueued_job?([enqueued, scheduled], migration_class)
+ end
+
+ def dead_jobs?(migration_class)
+ dead_set = Sidekiq::DeadSet.new
+
+ enqueued_job?([dead_set], migration_class)
+ end
+
+ def retrying_jobs?(migration_class)
+ retry_set = Sidekiq::RetrySet.new
+
+ enqueued_job?([retry_set], migration_class)
+ end
+
+ def migration_class_for(class_name)
+ Gitlab::BackgroundMigration.const_get(class_name, false)
+ end
+
+ def enqueued_job?(queues, migration_class)
+ queues.any? do |queue|
+ queue.any? do |job|
+ job.klass == worker_class.name && job.args.first == migration_class
+ end
+ end
+ end
+
+ private
+
+ def initialize(database, worker_class)
+ @database = database
+ @worker_class = worker_class
+ end
+
+ def connection
+ @connection ||= Gitlab::Database
+ .database_base_models
+ .fetch(database, Gitlab::Database::PRIMARY_DATABASE_NAME)
+ .connection
+ end
+ end
+ end
+end
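
A usage sketch for the coordinator defined above; the invocation and the migration name are illustrative, but the methods are the ones introduced in this file.

  coordinator = Gitlab::BackgroundMigration::JobCoordinator.for_database(:main)

  coordinator.remaining                              # enqueued + scheduled jobs for this worker class
  coordinator.exists?('BackfillProjectNamespaces')   # any queued or scheduled job for that migration?
  coordinator.steal('BackfillProjectNamespaces')     # pull its jobs off the queues and run them inline,
                                                     # re-enqueueing a job if it raises
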
diff --git a/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys.rb b/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys.rb
index 1c60473750d..36a339c6b80 100644
--- a/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys.rb
+++ b/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys.rb
@@ -34,7 +34,7 @@ module Gitlab
end
end
- Gitlab::Database.main.bulk_insert(TEMP_TABLE, fingerprints) # rubocop:disable Gitlab/BulkInsert
+ ApplicationRecord.legacy_bulk_insert(TEMP_TABLE, fingerprints) # rubocop:disable Gitlab/BulkInsert
execute("ANALYZE #{TEMP_TABLE}")
diff --git a/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data.rb b/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data.rb
deleted file mode 100644
index 14c72bb4a72..00000000000
--- a/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data.rb
+++ /dev/null
@@ -1,146 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # This migration takes all issue trackers
- # and move data from properties to data field tables (jira_tracker_data and issue_tracker_data)
- class MigrateIssueTrackersSensitiveData
- delegate :select_all, :execute, :quote_string, to: :connection
-
- # we need to define this class and set fields encryption
- class IssueTrackerData < ApplicationRecord
- self.table_name = 'issue_tracker_data'
-
- def self.encryption_options
- {
- key: Settings.attr_encrypted_db_key_base_32,
- encode: true,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm'
- }
- end
-
- attr_encrypted :project_url, encryption_options
- attr_encrypted :issues_url, encryption_options
- attr_encrypted :new_issue_url, encryption_options
- end
-
- # we need to define this class and set fields encryption
- class JiraTrackerData < ApplicationRecord
- self.table_name = 'jira_tracker_data'
-
- def self.encryption_options
- {
- key: Settings.attr_encrypted_db_key_base_32,
- encode: true,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm'
- }
- end
-
- attr_encrypted :url, encryption_options
- attr_encrypted :api_url, encryption_options
- attr_encrypted :username, encryption_options
- attr_encrypted :password, encryption_options
- end
-
- def perform(start_id, stop_id)
- columns = 'id, properties, title, description, type'
- batch_condition = "id >= #{start_id} AND id <= #{stop_id} AND category = 'issue_tracker' \
- AND properties IS NOT NULL AND properties != '{}' AND properties != ''"
-
- data_subselect = "SELECT 1 \
- FROM jira_tracker_data \
- WHERE jira_tracker_data.service_id = services.id \
- UNION SELECT 1 \
- FROM issue_tracker_data \
- WHERE issue_tracker_data.service_id = services.id"
-
- query = "SELECT #{columns} FROM services WHERE #{batch_condition} AND NOT EXISTS (#{data_subselect})"
-
- migrated_ids = []
- data_to_insert(query).each do |table, data|
- service_ids = data.map { |s| s['service_id'] }
-
- next if service_ids.empty?
-
- migrated_ids += service_ids
- Gitlab::Database.main.bulk_insert(table, data) # rubocop:disable Gitlab/BulkInsert
- end
-
- return if migrated_ids.empty?
-
- move_title_description(migrated_ids)
- end
-
- private
-
- def data_to_insert(query)
- data = { 'jira_tracker_data' => [], 'issue_tracker_data' => [] }
- select_all(query).each do |service|
- begin
- properties = Gitlab::Json.parse(service['properties'])
- rescue JSON::ParserError
- logger.warn(
- message: 'Properties data not parsed - invalid json',
- service_id: service['id'],
- properties: service['properties']
- )
- next
- end
-
- if service['type'] == 'JiraService'
- row = data_row(JiraTrackerData, jira_mapping(properties), service)
- key = 'jira_tracker_data'
- else
- row = data_row(IssueTrackerData, issue_tracker_mapping(properties), service)
- key = 'issue_tracker_data'
- end
-
- data[key] << row if row
- end
-
- data
- end
-
- def data_row(klass, mapping, service)
- base_params = { service_id: service['id'], created_at: Time.current, updated_at: Time.current }
- klass.new(mapping).slice(*klass.column_names).compact.merge(base_params)
- end
-
- def move_title_description(service_ids)
- query = "UPDATE services SET \
- title = cast(properties as json)->>'title', \
- description = cast(properties as json)->>'description' \
- WHERE id IN (#{service_ids.join(',')}) AND title IS NULL AND description IS NULL"
-
- execute(query)
- end
-
- def jira_mapping(properties)
- {
- url: properties['url'],
- api_url: properties['api_url'],
- username: properties['username'],
- password: properties['password']
- }
- end
-
- def issue_tracker_mapping(properties)
- {
- project_url: properties['project_url'],
- issues_url: properties['issues_url'],
- new_issue_url: properties['new_issue_url']
- }
- end
-
- def connection
- @connection ||= ActiveRecord::Base.connection
- end
-
- def logger
- @logger ||= Gitlab::BackgroundMigration::Logger.build
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/migrate_requirements_to_work_items.rb b/lib/gitlab/background_migration/migrate_requirements_to_work_items.rb
new file mode 100644
index 00000000000..017791f197c
--- /dev/null
+++ b/lib/gitlab/background_migration/migrate_requirements_to_work_items.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # No op on CE
+ class MigrateRequirementsToWorkItems
+ def perform(start_id, end_id)
+ end
+ end
+ end
+end
+
+Gitlab::BackgroundMigration::MigrateRequirementsToWorkItems.prepend_mod_with('Gitlab::BackgroundMigration::MigrateRequirementsToWorkItems')
diff --git a/lib/gitlab/background_migration/migrate_users_bio_to_user_details.rb b/lib/gitlab/background_migration/migrate_users_bio_to_user_details.rb
deleted file mode 100644
index bbe2164ae4e..00000000000
--- a/lib/gitlab/background_migration/migrate_users_bio_to_user_details.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class MigrateUsersBioToUserDetails
- class User < ActiveRecord::Base
- self.table_name = 'users'
- end
-
- class UserDetails < ActiveRecord::Base
- self.table_name = 'user_details'
- end
-
- def perform(start_id, stop_id)
- relation = User
- .select("id AS user_id", "substring(COALESCE(bio, '') from 1 for 255) AS bio")
- .where("(COALESCE(bio, '') IS DISTINCT FROM '')")
- .where(id: (start_id..stop_id))
-
- ActiveRecord::Base.connection.execute <<-EOF.strip_heredoc
- INSERT INTO user_details
- (user_id, bio)
- #{relation.to_sql}
- ON CONFLICT (user_id)
- DO UPDATE SET
- "bio" = EXCLUDED."bio";
- EOF
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/populate_issue_email_participants.rb b/lib/gitlab/background_migration/populate_issue_email_participants.rb
index 0a56ac1dae8..2b959b81f45 100644
--- a/lib/gitlab/background_migration/populate_issue_email_participants.rb
+++ b/lib/gitlab/background_migration/populate_issue_email_participants.rb
@@ -21,7 +21,7 @@ module Gitlab
}
end
- Gitlab::Database.main.bulk_insert(:issue_email_participants, rows, on_conflict: :do_nothing) # rubocop:disable Gitlab/BulkInsert
+ ApplicationRecord.legacy_bulk_insert(:issue_email_participants, rows, on_conflict: :do_nothing) # rubocop:disable Gitlab/BulkInsert
end
end
end
diff --git a/lib/gitlab/background_migration/populate_user_highest_roles_table.rb b/lib/gitlab/background_migration/populate_user_highest_roles_table.rb
deleted file mode 100644
index 16386ebf9c3..00000000000
--- a/lib/gitlab/background_migration/populate_user_highest_roles_table.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # This background migration creates records on user_highest_roles according to
- # the given user IDs range. IDs will load users with a left outer joins to
- # have a record for users without a Group or Project. One INSERT per ID is
- # issued.
- class PopulateUserHighestRolesTable
- BATCH_SIZE = 100
-
- # rubocop:disable Style/Documentation
- class User < ActiveRecord::Base
- self.table_name = 'users'
-
- scope :active, -> {
- where(state: 'active', user_type: nil, bot_type: nil)
- .where('ghost IS NOT TRUE')
- }
- end
-
- def perform(from_id, to_id)
- return unless User.column_names.include?('bot_type')
-
- (from_id..to_id).each_slice(BATCH_SIZE) do |ids|
- execute(
- <<-EOF
- INSERT INTO user_highest_roles (updated_at, user_id, highest_access_level)
- #{select_sql(from_id, to_id)}
- ON CONFLICT (user_id) DO
- UPDATE SET highest_access_level = EXCLUDED.highest_access_level
- EOF
- )
- end
- end
-
- private
-
- def select_sql(from_id, to_id)
- User
- .select('NOW() as updated_at, users.id, MAX(access_level) AS highest_access_level')
- .joins('LEFT OUTER JOIN members ON members.user_id = users.id AND members.requested_at IS NULL')
- .where(users: { id: active_user_ids(from_id, to_id) })
- .group('users.id')
- .to_sql
- end
-
- def active_user_ids(from_id, to_id)
- User.active.where(users: { id: from_id..to_id }).pluck(:id)
- end
-
- def execute(sql)
- @connection ||= ActiveRecord::Base.connection
- @connection.execute(sql)
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
new file mode 100644
index 00000000000..8e94c16369e
--- /dev/null
+++ b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ module ProjectNamespaces
+ # Back-fill project namespaces for projects that do not yet have a namespace.
+ #
+      # TODO: remove this comment when an actual backfill migration is added.
+ #
+ # This is first being added without an actual migration as we need to initially test
+ # if backfilling project namespaces affects performance in any significant way.
+ # rubocop: disable Metrics/ClassLength
+ class BackfillProjectNamespaces
+ BATCH_SIZE = 100
+ DELETE_BATCH_SIZE = 10
+ PROJECT_NAMESPACE_STI_NAME = 'Project'
+
+ IsolatedModels = ::Gitlab::BackgroundMigration::ProjectNamespaces::Models
+
+ def perform(start_id, end_id, namespace_id, migration_type = 'up')
+ load_project_ids(start_id, end_id, namespace_id)
+
+ case migration_type
+ when 'up'
+ backfill_project_namespaces(namespace_id)
+ mark_job_as_succeeded(start_id, end_id, namespace_id, 'up')
+ when 'down'
+ cleanup_backfilled_project_namespaces(namespace_id)
+ mark_job_as_succeeded(start_id, end_id, namespace_id, 'down')
+ else
+ raise "Unknown migration type"
+ end
+ end
+
+ private
+
+ attr_accessor :project_ids
+
+ def backfill_project_namespaces(namespace_id)
+ project_ids.each_slice(BATCH_SIZE) do |project_ids|
+            # We need to lock these project records while we create project namespaces and link
+            # them to projects, so that a project modified between creating the project namespaces
+            # (`batch_insert_namespaces`) and linking them to projects (`batch_update_projects`)
+            # does not end up out of sync.
+ #
+ # see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72527#note_730679469
+ Project.transaction do
+ Project.where(id: project_ids).select(:id).lock!('FOR UPDATE')
+
+ batch_insert_namespaces(project_ids)
+ batch_update_projects(project_ids)
+ end
+
+ batch_update_project_namespaces_traversal_ids(project_ids)
+ end
+ end
+
+ def cleanup_backfilled_project_namespaces(namespace_id)
+ project_ids.each_slice(BATCH_SIZE) do |project_ids|
+            # IMPORTANT: first nullify project_namespace_id in the projects table to avoid removing projects when records
+ # from namespaces are deleted due to FK/triggers
+ nullify_project_namespaces_in_projects(project_ids)
+ delete_project_namespace_records(project_ids)
+ end
+ end
+
+ def batch_insert_namespaces(project_ids)
+ projects = IsolatedModels::Project.where(id: project_ids)
+ .select("projects.id, projects.name, projects.path, projects.namespace_id, projects.visibility_level, shared_runners_enabled, '#{PROJECT_NAMESPACE_STI_NAME}', now(), now()")
+
+ ActiveRecord::Base.connection.execute <<~SQL
+ INSERT INTO namespaces (tmp_project_id, name, path, parent_id, visibility_level, shared_runners_enabled, type, created_at, updated_at)
+ #{projects.to_sql}
+ ON CONFLICT DO NOTHING;
+ SQL
+ end
+
+ def batch_update_projects(project_ids)
+ projects = IsolatedModels::Project.where(id: project_ids)
+ .joins("INNER JOIN namespaces ON projects.id = namespaces.tmp_project_id")
+ .select("namespaces.id, namespaces.tmp_project_id")
+
+ ActiveRecord::Base.connection.execute <<~SQL
+ WITH cte(project_namespace_id, project_id) AS #{::Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
+ #{projects.to_sql}
+ )
+ UPDATE projects
+ SET project_namespace_id = cte.project_namespace_id
+ FROM cte
+ WHERE id = cte.project_id AND projects.project_namespace_id IS DISTINCT FROM cte.project_namespace_id
+ SQL
+ end
+
+ def batch_update_project_namespaces_traversal_ids(project_ids)
+ namespaces = Namespace.where(tmp_project_id: project_ids)
+ .joins("INNER JOIN namespaces n2 ON namespaces.parent_id = n2.id")
+ .select("namespaces.id as project_namespace_id, n2.traversal_ids")
+
+ ActiveRecord::Base.connection.execute <<~SQL
+ UPDATE namespaces
+ SET traversal_ids = array_append(project_namespaces.traversal_ids, project_namespaces.project_namespace_id)
+ FROM (#{namespaces.to_sql}) as project_namespaces(project_namespace_id, traversal_ids)
+ WHERE id = project_namespaces.project_namespace_id
+ SQL
+ end
+
+ def nullify_project_namespaces_in_projects(project_ids)
+ IsolatedModels::Project.where(id: project_ids).update_all(project_namespace_id: nil)
+ end
+
+ def delete_project_namespace_records(project_ids)
+ project_ids.each_slice(DELETE_BATCH_SIZE) do |p_ids|
+ IsolatedModels::Namespace.where(type: PROJECT_NAMESPACE_STI_NAME).where(tmp_project_id: p_ids).delete_all
+ end
+ end
+
+ def load_project_ids(start_id, end_id, namespace_id)
+ projects = IsolatedModels::Project.arel_table
+ relation = IsolatedModels::Project.where(projects[:id].between(start_id..end_id))
+ relation = relation.where(projects[:namespace_id].in(Arel::Nodes::SqlLiteral.new(hierarchy_cte(namespace_id)))) if namespace_id
+
+ @project_ids = relation.pluck(:id)
+ end
+
+ def mark_job_as_succeeded(*arguments)
+ ::Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded('BackfillProjectNamespaces', arguments)
+ end
+
+ def hierarchy_cte(root_namespace_id)
+ <<-SQL
+ WITH RECURSIVE "base_and_descendants" AS (
+ (
+ SELECT "namespaces"."id"
+ FROM "namespaces"
+ WHERE "namespaces"."type" = 'Group' AND "namespaces"."id" = #{root_namespace_id.to_i}
+ )
+ UNION
+ (
+ SELECT "namespaces"."id"
+ FROM "namespaces", "base_and_descendants"
+ WHERE "namespaces"."type" = 'Group' AND "namespaces"."parent_id" = "base_and_descendants"."id"
+ )
+ )
+ SELECT "id" FROM "base_and_descendants" AS "namespaces"
+ SQL
+ end
+ end
+ # rubocop: enable Metrics/ClassLength
+ end
+ end
+end
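
An illustrative invocation of the job above, assuming a hypothetical ID range and group; 'up' creates and links the project namespaces, 'down' reverses the backfill for the same range.

  job = Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces.new

  job.perform(1, 10_000, group.id, 'up')    # insert namespaces, link projects, fix traversal_ids
  job.perform(1, 10_000, group.id, 'down')  # nullify project_namespace_id first, then delete the created namespaces
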
diff --git a/lib/gitlab/background_migration/project_namespaces/models/namespace.rb b/lib/gitlab/background_migration/project_namespaces/models/namespace.rb
new file mode 100644
index 00000000000..5576c34cf65
--- /dev/null
+++ b/lib/gitlab/background_migration/project_namespaces/models/namespace.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ module ProjectNamespaces
+ module Models
+ # isolated Namespace model
+ class Namespace < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'namespaces'
+ self.inheritance_column = :_type_disabled
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/project_namespaces/models/project.rb b/lib/gitlab/background_migration/project_namespaces/models/project.rb
new file mode 100644
index 00000000000..4a6a309e289
--- /dev/null
+++ b/lib/gitlab/background_migration/project_namespaces/models/project.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ module ProjectNamespaces
+ module Models
+ # isolated Project model
+ class Project < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'projects'
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb b/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb
index ca61118a06c..15799659b55 100644
--- a/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb
+++ b/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb
@@ -2,7 +2,7 @@
# rubocop: disable Style/Documentation
class Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings
- DELETE_BATCH_SIZE = 100
+ DELETE_BATCH_SIZE = 50
# rubocop:disable Gitlab/NamespacedClass
class VulnerabilitiesFinding < ActiveRecord::Base
@@ -10,6 +10,12 @@ class Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings
end
# rubocop:enable Gitlab/NamespacedClass
+ # rubocop:disable Gitlab/NamespacedClass
+ class Vulnerability < ActiveRecord::Base
+ self.table_name = "vulnerabilities"
+ end
+ # rubocop:enable Gitlab/NamespacedClass
+
def perform(start_id, end_id)
batch = VulnerabilitiesFinding.where(id: start_id..end_id)
@@ -40,11 +46,19 @@ class Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings
ids_to_delete.concat(duplicate_ids)
if ids_to_delete.size == DELETE_BATCH_SIZE
- VulnerabilitiesFinding.where(id: ids_to_delete).delete_all
+ delete_findings_and_vulnerabilities(ids_to_delete)
ids_to_delete.clear
end
end
- VulnerabilitiesFinding.where(id: ids_to_delete).delete_all if ids_to_delete.any?
+ delete_findings_and_vulnerabilities(ids_to_delete) if ids_to_delete.any?
+ end
+
+ private
+
+ def delete_findings_and_vulnerabilities(ids)
+ vulnerability_ids = VulnerabilitiesFinding.where(id: ids).pluck(:vulnerability_id).compact
+ VulnerabilitiesFinding.where(id: ids).delete_all
+ Vulnerability.where(id: vulnerability_ids).delete_all
end
end
diff --git a/lib/gitlab/background_migration/remove_undefined_occurrence_confidence_level.rb b/lib/gitlab/background_migration/remove_undefined_occurrence_confidence_level.rb
deleted file mode 100644
index 540ffc6f548..00000000000
--- a/lib/gitlab/background_migration/remove_undefined_occurrence_confidence_level.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class RemoveUndefinedOccurrenceConfidenceLevel
- def perform(start_id, stop_id)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::RemoveUndefinedOccurrenceConfidenceLevel.prepend_mod_with('Gitlab::BackgroundMigration::RemoveUndefinedOccurrenceConfidenceLevel')
diff --git a/lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb b/lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb
deleted file mode 100644
index cecb385afa0..00000000000
--- a/lib/gitlab/background_migration/remove_undefined_occurrence_severity_level.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class RemoveUndefinedOccurrenceSeverityLevel
- def perform(start_id, stop_id)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::RemoveUndefinedOccurrenceSeverityLevel.prepend_mod_with('Gitlab::BackgroundMigration::RemoveUndefinedOccurrenceSeverityLevel')
diff --git a/lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb b/lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb
deleted file mode 100644
index 1ea483f929f..00000000000
--- a/lib/gitlab/background_migration/remove_undefined_vulnerability_severity_level.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class RemoveUndefinedVulnerabilitySeverityLevel
- def perform(start_id, stop_id)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilitySeverityLevel.prepend_mod_with('Gitlab::BackgroundMigration::RemoveUndefinedVulnerabilitySeverityLevel')
diff --git a/lib/gitlab/background_migration/set_default_iteration_cadences.rb b/lib/gitlab/background_migration/set_default_iteration_cadences.rb
deleted file mode 100644
index 42f9d33ab71..00000000000
--- a/lib/gitlab/background_migration/set_default_iteration_cadences.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # rubocop:disable Style/Documentation
- class SetDefaultIterationCadences
- class Iteration < ApplicationRecord
- self.table_name = 'sprints'
- end
-
- class IterationCadence < ApplicationRecord
- self.table_name = 'iterations_cadences'
-
- include BulkInsertSafe
- end
-
- class Group < ApplicationRecord
- self.table_name = 'namespaces'
-
- self.inheritance_column = :_type_disabled
- end
-
- def perform(*group_ids)
- create_iterations_cadences(group_ids)
- assign_iterations_cadences(group_ids)
- end
-
- private
-
- def create_iterations_cadences(group_ids)
- groups_with_cadence = IterationCadence.select(:group_id)
-
- new_cadences = Group.where(id: group_ids).where.not(id: groups_with_cadence).map do |group|
- last_iteration = Iteration.where(group_id: group.id).order(:start_date)&.last
-
- next unless last_iteration
-
- time = Time.now
- IterationCadence.new(
- group_id: group.id,
- title: "#{group.name} Iterations",
- start_date: last_iteration.start_date,
- last_run_date: last_iteration.start_date,
- automatic: false,
- created_at: time,
- updated_at: time
- )
- end
-
- IterationCadence.bulk_insert!(new_cadences.compact, skip_duplicates: true)
- end
-
- def assign_iterations_cadences(group_ids)
- IterationCadence.where(group_id: group_ids).each do |cadence|
- Iteration.where(iterations_cadence_id: nil).where(group_id: cadence.group_id).update_all(iterations_cadence_id: cadence.id)
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value.rb b/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value.rb
deleted file mode 100644
index 71f3483987e..00000000000
--- a/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # This class is responsible for migrating a range of merge request diffs
- # with external_diff_store == NULL to 1.
- #
- # The index `index_merge_request_diffs_external_diff_store_is_null` is
- # expected to be used to find the rows here and in the migration scheduling
- # the jobs that run this class.
- class SetNullExternalDiffStoreToLocalValue
- LOCAL_STORE = 1 # equal to ObjectStorage::Store::LOCAL
-
- # Temporary AR class for merge request diffs
- class MergeRequestDiff < ActiveRecord::Base
- self.table_name = 'merge_request_diffs'
- end
-
- def perform(start_id, stop_id)
- MergeRequestDiff.where(external_diff_store: nil, id: start_id..stop_id).update_all(external_diff_store: LOCAL_STORE)
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value.rb b/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value.rb
deleted file mode 100644
index c485c23f3be..00000000000
--- a/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # This class is responsible for migrating a range of package files
- # with file_store == NULL to 1.
- #
- # The index `index_packages_package_files_file_store_is_null` is
- # expected to be used to find the rows here and in the migration scheduling
- # the jobs that run this class.
- class SetNullPackageFilesFileStoreToLocalValue
- LOCAL_STORE = 1 # equal to ObjectStorage::Store::LOCAL
-
- module Packages
- # Temporary AR class for package files
- class PackageFile < ActiveRecord::Base
- self.table_name = 'packages_package_files'
- end
- end
-
- def perform(start_id, stop_id)
- Packages::PackageFile.where(file_store: nil, id: start_id..stop_id).update_all(file_store: LOCAL_STORE)
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/update_vulnerabilities_to_dismissed.rb b/lib/gitlab/background_migration/update_vulnerabilities_to_dismissed.rb
deleted file mode 100644
index 60adb6b7e3e..00000000000
--- a/lib/gitlab/background_migration/update_vulnerabilities_to_dismissed.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # rubocop: disable Style/Documentation
- class UpdateVulnerabilitiesToDismissed
- def perform(project_id)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::UpdateVulnerabilitiesToDismissed.prepend_mod_with('Gitlab::BackgroundMigration::UpdateVulnerabilitiesToDismissed')
diff --git a/lib/gitlab/background_migration/update_vulnerability_confidence.rb b/lib/gitlab/background_migration/update_vulnerability_confidence.rb
deleted file mode 100644
index 40d29978dd4..00000000000
--- a/lib/gitlab/background_migration/update_vulnerability_confidence.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # rubocop: disable Style/Documentation
- class UpdateVulnerabilityConfidence
- def perform(start_id, stop_id)
- end
- end
- end
-end
-
-Gitlab::BackgroundMigration::UpdateVulnerabilityConfidence.prepend_mod_with('Gitlab::BackgroundMigration::UpdateVulnerabilityConfidence')
diff --git a/lib/gitlab/background_migration/update_vulnerability_occurrences_location.rb b/lib/gitlab/background_migration/update_vulnerability_occurrences_location.rb
new file mode 100644
index 00000000000..458e0537f1c
--- /dev/null
+++ b/lib/gitlab/background_migration/update_vulnerability_occurrences_location.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # rubocop: disable Style/Documentation
+ class UpdateVulnerabilityOccurrencesLocation
+ def perform(start_id, stop_id)
+ end
+ end
+ # rubocop: enable Style/Documentation
+ end
+end
+
+Gitlab::BackgroundMigration::UpdateVulnerabilityOccurrencesLocation.prepend_mod_with('Gitlab::BackgroundMigration::UpdateVulnerabilityOccurrencesLocation')
diff --git a/lib/gitlab/bitbucket_server_import/importer.rb b/lib/gitlab/bitbucket_server_import/importer.rb
index e0eee64dc58..899e2e6c1c5 100644
--- a/lib/gitlab/bitbucket_server_import/importer.rb
+++ b/lib/gitlab/bitbucket_server_import/importer.rb
@@ -461,10 +461,14 @@ module Gitlab
end
def uid(rep_object)
- find_user_id(by: :email, value: rep_object.author_email) unless Feature.enabled?(:bitbucket_server_user_mapping_by_username)
-
- find_user_id(by: :username, value: rep_object.author_username) ||
+        # When the feature flag is enabled, match users only by username.
+        # Otherwise, match by email.
+        # There is no fall-through from username to email; unmatched users fall back to the import user.
+ if Feature.enabled?(:bitbucket_server_user_mapping_by_username)
+ find_user_id(by: :username, value: rep_object.author_username)
+ else
find_user_id(by: :email, value: rep_object.author_email)
+ end
end
end
end
diff --git a/lib/gitlab/blob_helper.rb b/lib/gitlab/blob_helper.rb
index c5b183d113d..9e4ea934edb 100644
--- a/lib/gitlab/blob_helper.rb
+++ b/lib/gitlab/blob_helper.rb
@@ -47,7 +47,7 @@ module Gitlab
end
def image?
- ['.png', '.jpg', '.jpeg', '.gif', '.svg'].include?(extname.downcase)
+ ['.png', '.jpg', '.jpeg', '.gif', '.svg', '.webp'].include?(extname.downcase)
end
# Internal: Lookup mime type for extension.
diff --git a/lib/gitlab/ci/artifact_file_reader.rb b/lib/gitlab/ci/artifact_file_reader.rb
index 3cfed8e5e2c..b0fad026ec5 100644
--- a/lib/gitlab/ci/artifact_file_reader.rb
+++ b/lib/gitlab/ci/artifact_file_reader.rb
@@ -45,14 +45,6 @@ module Gitlab
end
def read_zip_file!(file_path)
- if ::Feature.enabled?(:ci_new_artifact_file_reader, job.project, default_enabled: :yaml)
- read_with_new_artifact_file_reader(file_path)
- else
- read_with_legacy_artifact_file_reader(file_path)
- end
- end
-
- def read_with_new_artifact_file_reader(file_path)
job.artifacts_file.use_open_file do |file|
zip_file = Zip::File.new(file, false, true)
entry = zip_file.find_entry(file_path)
@@ -69,25 +61,6 @@ module Gitlab
end
end
- def read_with_legacy_artifact_file_reader(file_path)
- job.artifacts_file.use_file do |archive_path|
- Zip::File.open(archive_path) do |zip_file|
- entry = zip_file.find_entry(file_path)
- unless entry
- raise Error, "Path `#{file_path}` does not exist inside the `#{job.name}` artifacts archive!"
- end
-
- if entry.name_is_directory?
- raise Error, "Path `#{file_path}` was expected to be a file but it was a directory!"
- end
-
- zip_file.get_input_stream(entry) do |is|
- is.read
- end
- end
- end
- end
-
def max_archive_size_in_mb
ActiveSupport::NumberHelper.number_to_human_size(MAX_ARCHIVE_SIZE)
end
diff --git a/lib/gitlab/ci/artifacts/metrics.rb b/lib/gitlab/ci/artifacts/metrics.rb
index 656f4d2cc13..03459c4bf36 100644
--- a/lib/gitlab/ci/artifacts/metrics.rb
+++ b/lib/gitlab/ci/artifacts/metrics.rb
@@ -6,10 +6,14 @@ module Gitlab
class Metrics
include Gitlab::Utils::StrongMemoize
- def increment_destroyed_artifacts(size)
+ def increment_destroyed_artifacts_count(size)
destroyed_artifacts_counter.increment({}, size.to_i)
end
+ def increment_destroyed_artifacts_bytes(bytes)
+ destroyed_artifacts_bytes_counter.increment({}, bytes)
+ end
+
private
def destroyed_artifacts_counter
@@ -20,6 +24,15 @@ module Gitlab
::Gitlab::Metrics.counter(name, comment)
end
end
+
+ def destroyed_artifacts_bytes_counter
+ strong_memoize(:destroyed_artifacts_bytes_counter) do
+ name = :destroyed_job_artifacts_bytes_total
+ comment = 'Counter of bytes of destroyed expired job artifacts'
+
+ ::Gitlab::Metrics.counter(name, comment)
+ end
+ end
end
end
end
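
A sketch of how a caller might record one destroyed batch with both counters introduced above; `artifacts` is assumed to be a collection of job artifact records responding to `size`.

  metrics = Gitlab::Ci::Artifacts::Metrics.new

  metrics.increment_destroyed_artifacts_count(artifacts.count)        # how many artifacts were removed
  metrics.increment_destroyed_artifacts_bytes(artifacts.sum(&:size))  # how much storage they freed
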
diff --git a/lib/gitlab/ci/build/auto_retry.rb b/lib/gitlab/ci/build/auto_retry.rb
index 6ab567dff7c..4950a7616c8 100644
--- a/lib/gitlab/ci/build/auto_retry.rb
+++ b/lib/gitlab/ci/build/auto_retry.rb
@@ -10,7 +10,9 @@ class Gitlab::Ci::Build::AutoRetry
RETRY_OVERRIDES = {
ci_quota_exceeded: 0,
no_matching_runner: 0,
- missing_dependency_failure: 0
+ missing_dependency_failure: 0,
+ forward_deployment_failure: 0,
+ environment_creation_failure: 0
}.freeze
def initialize(build)
diff --git a/lib/gitlab/ci/build/context/base.rb b/lib/gitlab/ci/build/context/base.rb
index 02b97ea76e9..c7ea7c78e2f 100644
--- a/lib/gitlab/ci/build/context/base.rb
+++ b/lib/gitlab/ci/build/context/base.rb
@@ -5,6 +5,8 @@ module Gitlab
module Build
module Context
class Base
+ include Gitlab::Utils::StrongMemoize
+
attr_reader :pipeline
def initialize(pipeline)
@@ -15,6 +17,26 @@ module Gitlab
raise NotImplementedError
end
+ def project
+ pipeline.project
+ end
+
+ def sha
+ pipeline.sha
+ end
+
+ def top_level_worktree_paths
+ strong_memoize(:top_level_worktree_paths) do
+ project.repository.tree(sha).blobs.map(&:path)
+ end
+ end
+
+ def all_worktree_paths
+ strong_memoize(:all_worktree_paths) do
+ project.repository.ls_files(sha)
+ end
+ end
+
protected
def pipeline_attributes
diff --git a/lib/gitlab/ci/build/image.rb b/lib/gitlab/ci/build/image.rb
index 1d7bfba75cd..8ddcf1d523e 100644
--- a/lib/gitlab/ci/build/image.rb
+++ b/lib/gitlab/ci/build/image.rb
@@ -4,7 +4,7 @@ module Gitlab
module Ci
module Build
class Image
- attr_reader :alias, :command, :entrypoint, :name, :ports
+ attr_reader :alias, :command, :entrypoint, :name, :ports, :variables
class << self
def from_image(job)
@@ -33,6 +33,7 @@ module Gitlab
@entrypoint = image[:entrypoint]
@name = image[:name]
@ports = build_ports(image).select(&:valid?)
+ @variables = build_variables(image)
end
end
@@ -45,6 +46,12 @@ module Gitlab
def build_ports(image)
image[:ports].to_a.map { |port| ::Gitlab::Ci::Build::Port.new(port) }
end
+
+ def build_variables(image)
+ image[:variables].to_a.map do |key, value|
+ { key: key, value: value.to_s }
+ end
+ end
end
end
end
diff --git a/lib/gitlab/ci/build/rules/rule/clause/exists.rb b/lib/gitlab/ci/build/rules/rule/clause/exists.rb
index 85e77438f51..e2b54797dc8 100644
--- a/lib/gitlab/ci/build/rules/rule/clause/exists.rb
+++ b/lib/gitlab/ci/build/rules/rule/clause/exists.rb
@@ -15,19 +15,21 @@ module Gitlab
@exact_globs, @pattern_globs = globs.partition(&method(:exact_glob?))
end
- def satisfied_by?(pipeline, context)
- paths = worktree_paths(pipeline)
+ def satisfied_by?(_pipeline, context)
+ paths = worktree_paths(context)
exact_matches?(paths) || pattern_matches?(paths)
end
private
- def worktree_paths(pipeline)
+ def worktree_paths(context)
+ return unless context.project
+
if @top_level_only
- pipeline.top_level_worktree_paths
+ context.top_level_worktree_paths
else
- pipeline.all_worktree_paths
+ context.all_worktree_paths
end
end
diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb
index aceaf012f7e..6f149385969 100644
--- a/lib/gitlab/ci/config.rb
+++ b/lib/gitlab/ci/config.rb
@@ -19,11 +19,12 @@ module Gitlab
attr_reader :root, :context, :source_ref_path, :source
- def initialize(config, project: nil, sha: nil, user: nil, parent_pipeline: nil, source_ref_path: nil, source: nil)
- @context = build_context(project: project, sha: sha, user: user, parent_pipeline: parent_pipeline, ref: source_ref_path)
+ def initialize(config, project: nil, pipeline: nil, sha: nil, user: nil, parent_pipeline: nil, source: nil)
+ @source_ref_path = pipeline&.source_ref_path
+
+ @context = build_context(project: project, pipeline: pipeline, sha: sha, user: user, parent_pipeline: parent_pipeline)
@context.set_deadline(TIMEOUT_SECONDS)
- @source_ref_path = source_ref_path
@source = source
@config = expand_config(config)
@@ -108,16 +109,16 @@ module Gitlab
end
end
- def build_context(project:, sha:, user:, parent_pipeline:, ref:)
+ def build_context(project:, pipeline:, sha:, user:, parent_pipeline:)
Config::External::Context.new(
project: project,
sha: sha || find_sha(project),
user: user,
parent_pipeline: parent_pipeline,
- variables: build_variables(project: project, ref: ref))
+ variables: build_variables(project: project, pipeline: pipeline))
end
- def build_variables(project:, ref:)
+ def build_variables(project:, pipeline:)
Gitlab::Ci::Variables::Collection.new.tap do |variables|
break variables unless project
@@ -126,18 +127,12 @@ module Gitlab
#
# See more detail in the docs: https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
variables.concat(project.predefined_variables)
- variables.concat(pipeline_predefined_variables(ref: ref))
- variables.concat(project.ci_instance_variables_for(ref: ref))
- variables.concat(project.group.ci_variables_for(ref, project)) if project.group
- variables.concat(project.ci_variables_for(ref: ref))
- end
- end
-
- # https://gitlab.com/gitlab-org/gitlab/-/issues/337633 aims to add all predefined variables
- # to this list, but only CI_COMMIT_REF_NAME is available right now to support compliance pipelines.
- def pipeline_predefined_variables(ref:)
- Gitlab::Ci::Variables::Collection.new.tap do |v|
- v.append(key: 'CI_COMMIT_REF_NAME', value: ref)
+ variables.concat(pipeline.predefined_variables) if pipeline
+ variables.concat(project.ci_instance_variables_for(ref: source_ref_path))
+ variables.concat(project.group.ci_variables_for(source_ref_path, project)) if project.group
+ variables.concat(project.ci_variables_for(ref: source_ref_path))
+ variables.concat(pipeline.variables) if pipeline
+ variables.concat(pipeline.pipeline_schedule.job_variables) if pipeline&.pipeline_schedule
end
end
diff --git a/lib/gitlab/ci/config/entry/include/rules/rule.rb b/lib/gitlab/ci/config/entry/include/rules/rule.rb
index d3d0f098814..fa99a7204d6 100644
--- a/lib/gitlab/ci/config/entry/include/rules/rule.rb
+++ b/lib/gitlab/ci/config/entry/include/rules/rule.rb
@@ -9,9 +9,9 @@ module Gitlab
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
- ALLOWED_KEYS = %i[if].freeze
+ ALLOWED_KEYS = %i[if exists].freeze
- attributes :if
+ attributes :if, :exists
validations do
validates :config, presence: true
diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb
index f867189d521..75bbe2ccb1b 100644
--- a/lib/gitlab/ci/config/entry/job.rb
+++ b/lib/gitlab/ci/config/entry/job.rb
@@ -14,10 +14,10 @@ module Gitlab
ALLOWED_KEYS = %i[tags script type image services start_in artifacts
cache dependencies before_script after_script
environment coverage retry parallel interruptible timeout
- release dast_configuration secrets].freeze
+ release].freeze
validations do
- validates :config, allowed_keys: ALLOWED_KEYS + PROCESSABLE_ALLOWED_KEYS
+ validates :config, allowed_keys: Gitlab::Ci::Config::Entry::Job.allowed_keys + PROCESSABLE_ALLOWED_KEYS
validates :script, presence: true
with_options allow_nil: true do
@@ -178,6 +178,10 @@ module Gitlab
allow_failure_defined? ? static_allow_failure : manual_action?
end
+ def self.allowed_keys
+ ALLOWED_KEYS
+ end
+
private
def allow_failure_criteria
diff --git a/lib/gitlab/ci/config/entry/processable.rb b/lib/gitlab/ci/config/entry/processable.rb
index 2549c35ebd6..520b1ce6119 100644
--- a/lib/gitlab/ci/config/entry/processable.rb
+++ b/lib/gitlab/ci/config/entry/processable.rb
@@ -23,6 +23,7 @@ module Gitlab
validates :config, presence: true
validates :name, presence: true
validates :name, type: Symbol
+ validates :name, length: { maximum: 255 }, if: -> { ::Feature.enabled?(:ci_validate_job_length, default_enabled: :yaml) }
validates :config, disallowed_keys: {
in: %i[only except when start_in],
diff --git a/lib/gitlab/ci/config/entry/service.rb b/lib/gitlab/ci/config/entry/service.rb
index 247bf930d3b..f27dca4986e 100644
--- a/lib/gitlab/ci/config/entry/service.rb
+++ b/lib/gitlab/ci/config/entry/service.rb
@@ -15,7 +15,7 @@ module Gitlab
include ::Gitlab::Config::Entry::Attributable
include ::Gitlab::Config::Entry::Configurable
- ALLOWED_KEYS = %i[name entrypoint command alias ports].freeze
+ ALLOWED_KEYS = %i[name entrypoint command alias ports variables].freeze
validations do
validates :config, hash_or_string: true
@@ -32,6 +32,10 @@ module Gitlab
entry :ports, Entry::Ports,
description: 'Ports used to expose the service'
+ entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
+ description: 'Environment variables available for this service.',
+ inherit: false
+
attributes :ports
def alias
diff --git a/lib/gitlab/ci/config/external/context.rb b/lib/gitlab/ci/config/external/context.rb
index e0adb1b19c2..51624dc30ea 100644
--- a/lib/gitlab/ci/config/external/context.rb
+++ b/lib/gitlab/ci/config/external/context.rb
@@ -5,6 +5,8 @@ module Gitlab
class Config
module External
class Context
+ include Gitlab::Utils::StrongMemoize
+
TimeoutError = Class.new(StandardError)
attr_reader :project, :sha, :user, :parent_pipeline, :variables
@@ -22,6 +24,18 @@ module Gitlab
yield self if block_given?
end
+ def top_level_worktree_paths
+ strong_memoize(:top_level_worktree_paths) do
+ project.repository.tree(sha).blobs.map(&:path)
+ end
+ end
+
+ def all_worktree_paths
+ strong_memoize(:all_worktree_paths) do
+ project.repository.ls_files(sha)
+ end
+ end
+
def mutate(attrs = {})
self.class.new(**attrs) do |ctx|
ctx.expandset = expandset
diff --git a/lib/gitlab/ci/parsers/security/common.rb b/lib/gitlab/ci/parsers/security/common.rb
index 1cf4f252ab9..0c969daf7fd 100644
--- a/lib/gitlab/ci/parsers/security/common.rb
+++ b/lib/gitlab/ci/parsers/security/common.rb
@@ -33,8 +33,7 @@ module Gitlab
report_data
rescue JSON::ParserError
raise SecurityReportParserError, 'JSON parsing failed'
- rescue StandardError => e
- Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
+ rescue StandardError
raise SecurityReportParserError, "#{report.type} security report parsing failed"
end
@@ -115,7 +114,7 @@ module Gitlab
flags: flags,
links: links,
remediations: remediations,
- raw_metadata: data.to_json,
+ original_data: data,
metadata_version: report_version,
details: data['details'] || {},
signatures: signatures,
diff --git a/lib/gitlab/ci/parsers/security/validators/schema_validator.rb b/lib/gitlab/ci/parsers/security/validators/schema_validator.rb
index 143b930c669..73cfa02ce4b 100644
--- a/lib/gitlab/ci/parsers/security/validators/schema_validator.rb
+++ b/lib/gitlab/ci/parsers/security/validators/schema_validator.rb
@@ -34,7 +34,7 @@ module Gitlab
end
def file_name
- "#{report_type}.json"
+ "#{report_type.to_s.dasherize}-report-format.json"
end
end
diff --git a/lib/gitlab/ci/parsers/security/validators/schemas/sast.json b/lib/gitlab/ci/parsers/security/validators/schemas/sast-report-format.json
index a7159be0190..a7159be0190 100644
--- a/lib/gitlab/ci/parsers/security/validators/schemas/sast.json
+++ b/lib/gitlab/ci/parsers/security/validators/schemas/sast-report-format.json
diff --git a/lib/gitlab/ci/parsers/security/validators/schemas/secret_detection.json b/lib/gitlab/ci/parsers/security/validators/schemas/secret-detection-report-format.json
index 462e23a151c..462e23a151c 100644
--- a/lib/gitlab/ci/parsers/security/validators/schemas/secret_detection.json
+++ b/lib/gitlab/ci/parsers/security/validators/schemas/secret-detection-report-format.json
diff --git a/lib/gitlab/ci/pipeline/chain/command.rb b/lib/gitlab/ci/pipeline/chain/command.rb
index c9bc4ec411d..beb8801096b 100644
--- a/lib/gitlab/ci/pipeline/chain/command.rb
+++ b/lib/gitlab/ci/pipeline/chain/command.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -144,3 +145,5 @@ module Gitlab
end
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/ci/pipeline/chain/config/process.rb b/lib/gitlab/ci/pipeline/chain/config/process.rb
index 5251dd3d40a..f3c937ddd28 100644
--- a/lib/gitlab/ci/pipeline/chain/config/process.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/process.rb
@@ -14,7 +14,7 @@ module Gitlab
result = ::Gitlab::Ci::YamlProcessor.new(
@command.config_content, {
project: project,
- source_ref_path: @pipeline.source_ref_path,
+ pipeline: @pipeline,
sha: @pipeline.sha,
source: @pipeline.source,
user: current_user,
diff --git a/lib/gitlab/ci/pipeline/chain/create_cross_database_associations.rb b/lib/gitlab/ci/pipeline/chain/create_cross_database_associations.rb
new file mode 100644
index 00000000000..bb5b4e722b7
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/create_cross_database_associations.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class CreateCrossDatabaseAssociations < Chain::Base
+ def perform!
+ # to be overridden in EE
+ end
+
+ def break?
+ false # to be overridden in EE
+ end
+ end
+ end
+ end
+ end
+end
+
+Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations.prepend_mod_with('Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations')
diff --git a/lib/gitlab/ci/pipeline/metrics.rb b/lib/gitlab/ci/pipeline/metrics.rb
index 321efa7854f..b5e48f210ad 100644
--- a/lib/gitlab/ci/pipeline/metrics.rb
+++ b/lib/gitlab/ci/pipeline/metrics.rb
@@ -51,6 +51,15 @@ module Gitlab
::Gitlab::Metrics.histogram(name, comment, labels, buckets)
end
+ def self.pipeline_builder_scoped_variables_histogram
+ name = :gitlab_ci_pipeline_builder_scoped_variables_duration
+ comment = 'Pipeline variables builder scoped_variables duration'
+ labels = {}
+ buckets = [0.01, 0.05, 0.1, 0.3, 0.5, 1, 2, 5, 10, 30, 60, 120]
+
+ ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
+ end
+
def self.pipeline_processing_events_counter
name = :gitlab_ci_pipeline_processing_events_total
comment = 'Total amount of pipeline processing events'
diff --git a/lib/gitlab/ci/pipeline/seed/build.rb b/lib/gitlab/ci/pipeline/seed/build.rb
index 9ad5d6538b7..72837b8ec22 100644
--- a/lib/gitlab/ci/pipeline/seed/build.rb
+++ b/lib/gitlab/ci/pipeline/seed/build.rb
@@ -11,11 +11,11 @@ module Gitlab
delegate :dig, to: :@seed_attributes
- def initialize(context, attributes, previous_stages, current_stage)
+ def initialize(context, attributes, stages_for_needs_lookup = [])
@context = context
@pipeline = context.pipeline
@seed_attributes = attributes
- @stages_for_needs_lookup = (previous_stages + [current_stage]).compact
+ @stages_for_needs_lookup = stages_for_needs_lookup.compact
@needs_attributes = dig(:needs_attributes)
@resource_group_key = attributes.delete(:resource_group_key)
@job_variables = @seed_attributes.delete(:job_variables)
@@ -90,7 +90,7 @@ module Gitlab
::Ci::Bridge.new(attributes)
else
::Ci::Build.new(attributes).tap do |build|
- build.assign_attributes(self.class.environment_attributes_for(build))
+ build.assign_attributes(self.class.deployment_attributes_for(build))
end
end
end
@@ -101,10 +101,10 @@ module Gitlab
.to_resource
end
- def self.environment_attributes_for(build)
+ def self.deployment_attributes_for(build, environment = nil)
return {} unless build.has_environment?
- environment = Seed::Environment.new(build).to_resource
+ environment = Seed::Environment.new(build).to_resource if environment.nil?
unless environment.persisted?
if Feature.enabled?(:surface_environment_creation_failure, build.project, default_enabled: :yaml) &&
@@ -173,7 +173,7 @@ module Gitlab
end
def variable_expansion_errors
- expanded_collection = evaluate_context.variables.sort_and_expand_all(@pipeline.project)
+ expanded_collection = evaluate_context.variables.sort_and_expand_all
errors = expanded_collection.errors
["#{name}: #{errors}"] if errors
end
@@ -244,5 +244,3 @@ module Gitlab
end
end
end
-
-Gitlab::Ci::Pipeline::Seed::Build.prepend_mod_with('Gitlab::Ci::Pipeline::Seed::Build')
diff --git a/lib/gitlab/ci/pipeline/seed/stage.rb b/lib/gitlab/ci/pipeline/seed/stage.rb
index 018fb260986..bc56fe9bef9 100644
--- a/lib/gitlab/ci/pipeline/seed/stage.rb
+++ b/lib/gitlab/ci/pipeline/seed/stage.rb
@@ -17,7 +17,7 @@ module Gitlab
@previous_stages = previous_stages
@builds = attributes.fetch(:builds).map do |attributes|
- Seed::Build.new(context, attributes, previous_stages, self)
+ Seed::Build.new(context, attributes, previous_stages + [self])
end
end
diff --git a/lib/gitlab/ci/reports/security/finding.rb b/lib/gitlab/ci/reports/security/finding.rb
index 39531e12f69..47ec82ac86c 100644
--- a/lib/gitlab/ci/reports/security/finding.rb
+++ b/lib/gitlab/ci/reports/security/finding.rb
@@ -17,7 +17,6 @@ module Gitlab
attr_reader :name
attr_reader :old_location
attr_reader :project_fingerprint
- attr_reader :raw_metadata
attr_reader :report_type
attr_reader :scanner
attr_reader :scan
@@ -28,10 +27,13 @@ module Gitlab
attr_reader :details
attr_reader :signatures
attr_reader :project_id
+ attr_reader :original_data
delegate :file_path, :start_line, :end_line, to: :location
- def initialize(compare_key:, identifiers:, flags: [], links: [], remediations: [], location:, metadata_version:, name:, raw_metadata:, report_type:, scanner:, scan:, uuid:, confidence: nil, severity: nil, details: {}, signatures: [], project_id: nil, vulnerability_finding_signatures_enabled: false) # rubocop:disable Metrics/ParameterLists
+ alias_method :cve, :compare_key
+
+ def initialize(compare_key:, identifiers:, flags: [], links: [], remediations: [], location:, metadata_version:, name:, original_data:, report_type:, scanner:, scan:, uuid:, confidence: nil, severity: nil, details: {}, signatures: [], project_id: nil, vulnerability_finding_signatures_enabled: false) # rubocop:disable Metrics/ParameterLists
@compare_key = compare_key
@confidence = confidence
@identifiers = identifiers
@@ -40,7 +42,7 @@ module Gitlab
@location = location
@metadata_version = metadata_version
@name = name
- @raw_metadata = raw_metadata
+ @original_data = original_data
@report_type = report_type
@scanner = scanner
@scan = scan
@@ -74,6 +76,10 @@ module Gitlab
uuid
details
signatures
+ description
+ message
+ cve
+ solution
].each_with_object({}) do |key, hash|
hash[key] = public_send(key) # rubocop:disable GitlabSecurity/PublicSend
end
@@ -88,8 +94,8 @@ module Gitlab
@location = new_location
end
- def unsafe?(severity_levels)
- severity.in?(severity_levels)
+ def unsafe?(severity_levels, report_types)
+ severity.to_s.in?(severity_levels) && (report_types.blank? || report_type.to_s.in?(report_types) )
end
def eql?(other)
@@ -141,6 +147,30 @@ module Gitlab
scanner <=> other.scanner
end
+ def has_signatures?
+ signatures.present?
+ end
+
+ def raw_metadata
+ @raw_metadata ||= original_data.to_json
+ end
+
+ def description
+ original_data['description']
+ end
+
+ def message
+ original_data['message']
+ end
+
+ def solution
+ original_data['solution']
+ end
+
+ def location_data
+ original_data['location']
+ end
+
private
def generate_project_fingerprint
diff --git a/lib/gitlab/ci/reports/security/report.rb b/lib/gitlab/ci/reports/security/report.rb
index 1ba2d909d99..417319cb5be 100644
--- a/lib/gitlab/ci/reports/security/report.rb
+++ b/lib/gitlab/ci/reports/security/report.rb
@@ -69,6 +69,10 @@ module Gitlab
primary_scanner <=> other.primary_scanner
end
+
+ def has_signatures?
+ findings.any?(&:has_signatures?)
+ end
end
end
end
diff --git a/lib/gitlab/ci/reports/security/reports.rb b/lib/gitlab/ci/reports/security/reports.rb
index b7a5e36b108..b6372349f68 100644
--- a/lib/gitlab/ci/reports/security/reports.rb
+++ b/lib/gitlab/ci/reports/security/reports.rb
@@ -22,21 +22,24 @@ module Gitlab
reports.values.flat_map(&:findings)
end
- def violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels)
- unsafe_findings_count(target_reports, severity_levels) > vulnerabilities_allowed
+ def violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels, vulnerability_states, report_types = [])
+ unsafe_findings_count(target_reports, severity_levels, vulnerability_states, report_types) > vulnerabilities_allowed
end
- private
-
- def findings_diff(target_reports)
- findings - target_reports&.findings.to_a
+ def unsafe_findings_uuids(severity_levels, report_types)
+ findings.select { |finding| finding.unsafe?(severity_levels, report_types) }.map(&:uuid)
end
- def unsafe_findings_count(target_reports, severity_levels)
- findings_diff(target_reports).count {|finding| finding.unsafe?(severity_levels)}
+ private
+
+ def unsafe_findings_count(target_reports, severity_levels, vulnerability_states, report_types)
+ new_uuids = unsafe_findings_uuids(severity_levels, report_types) - target_reports&.unsafe_findings_uuids(severity_levels, report_types).to_a
+ new_uuids.count
end
end
end
end
end
end
+
+Gitlab::Ci::Reports::Security::Reports.prepend_mod_with('Gitlab::Ci::Reports::Security::Reports')
diff --git a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
index adb5d430d46..89fd59d98f4 100644
--- a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
@@ -179,3 +179,11 @@ include:
- template: Security/License-Scanning.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml
- template: Security/SAST.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
- template: Security/Secret-Detection.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml
+
+# The latest build job generates a dotenv report artifact with a CI_APPLICATION_TAG
+# that also includes the image digest. This configures Auto Deploy to receive
+# this artifact and use the updated CI_APPLICATION_TAG for deployments.
+.auto-deploy:
+ dependencies: [build]
+dast_environment_deploy:
+ dependencies: [build]
diff --git a/lib/gitlab/ci/templates/Django.gitlab-ci.yml b/lib/gitlab/ci/templates/Django.gitlab-ci.yml
index f147ad9332d..426076c84a1 100644
--- a/lib/gitlab/ci/templates/Django.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Django.gitlab-ci.yml
@@ -1,54 +1,76 @@
-# To contribute improvements to CI/CD templates, please follow the Development guide at:
-# https://docs.gitlab.com/ee/development/cicd/templates.html
-# This specific template is located at:
-# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Django.gitlab-ci.yml
-
-# Official framework image. Look for the different tagged releases at:
-# https://hub.docker.com/r/library/python
-image: python:latest
-
-# Pick zero or more services to be used on all builds.
-# Only needed when using a docker container to run your tests in.
-# Check out: http://docs.gitlab.com/ee/ci/docker/using_docker_images.html#what-is-a-service
-services:
- - mysql:latest
- - postgres:latest
+# This example is for testing Django with MySQL.
+#
+# The test CI/CD variables MYSQL_DB, MYSQL_USER and MYSQL_PASS can be set in the project settings at:
+# Settings --> CI/CD --> Variables
+#
+# The Django settings in settings.py, used in tests, might look similar to:
+#
+# DATABASES = {
+# 'default': {
+# 'ENGINE': 'django.db.backends.mysql',
+# 'NAME': os.environ.get('MYSQL_DATABASE'),
+# 'USER': os.environ.get('MYSQL_USER'),
+# 'PASSWORD': os.environ.get('MYSQL_PASSWORD'),
+# 'HOST': 'mysql',
+# 'PORT': '3306',
+# 'CONN_MAX_AGE':60,
+# },
+# }
+#
+# It is possible to use '--settings' to specify a custom settings file on the command line below or use an environment
+# variable to trigger an include at the bottom of your settings.py:
+# if os.environ.get('DJANGO_CONFIG')=='test':
+# from .settings_test import *
+#
+# It is also possible to hardcode the database name and credentials in the settings.py file and in the .gitlab-ci.yml file.
+#
+# The mysql service needs some variables too. See https://hub.docker.com/_/mysql for possible mysql env variables
+# Note that when using a service in GitLab CI/CD that needs environment variables to run, only variables defined in
+# .gitlab-ci.yml are passed to the service and variables defined in the GitLab UI are not.
+# https://gitlab.com/gitlab-org/gitlab/-/issues/30178
variables:
- POSTGRES_DB: database_name
+ # DJANGO_CONFIG: "test"
+ MYSQL_DATABASE: $MYSQL_DB
+ MYSQL_ROOT_PASSWORD: $MYSQL_PASS
+ MYSQL_USER: $MYSQL_USER
+ MYSQL_PASSWORD: $MYSQL_PASS
-# This folder is cached between builds
-# https://docs.gitlab.com/ee/ci/yaml/index.html#cache
-cache:
- paths:
- - ~/.cache/pip/
+default:
+ image: ubuntu:20.04
+ #
+ # Pick zero or more services to be used on all builds.
+ # Only needed when using a docker container to run your tests in.
+ # Check out: http://docs.gitlab.com/ee/ci/docker/using_docker_images.html#what-is-a-service
+ services:
+ - mysql:8.0
+ #
+ # This folder is cached between builds
+ # http://docs.gitlab.com/ee/ci/yaml/README.html#cache
+ cache:
+ paths:
+ - ~/.cache/pip/
+ before_script:
+ - apt -y update
+ - apt -y install apt-utils
+ - apt -y install net-tools python3.8 python3-pip mysql-client libmysqlclient-dev
+ - apt -y upgrade
+ - pip3 install -r requirements.txt
-# This is a basic example for a gem or script which doesn't use
-# services such as redis or postgres
-before_script:
- - python -V # Print out python version for debugging
- # Uncomment next line if your Django app needs a JS runtime:
- # - apt-get update -q && apt-get install nodejs -yqq
- - pip install -r requirements.txt
-# To get Django tests to work you may need to create a settings file using
-# the following DATABASES:
-#
-# DATABASES = {
-# 'default': {
-# 'ENGINE': 'django.db.backends.postgresql_psycopg2',
-# 'NAME': 'ci',
-# 'USER': 'postgres',
-# 'PASSWORD': 'postgres',
-# 'HOST': 'postgres',
-# 'PORT': '5432',
-# },
-# }
-#
-# and then adding `--settings app.settings.ci` (or similar) to the test command
+migrations:
+ stage: build
+ script:
+ - python3 manage.py makemigrations
+ # - python3 manage.py makemigrations myapp
+ - python3 manage.py migrate
+ - python3 manage.py check
+
-test:
- variables:
- DATABASE_URL: "postgresql://postgres:postgres@postgres:5432/$POSTGRES_DB"
+django-tests:
+ stage: test
script:
- - python manage.py test
+ # The MYSQL user only gets permissions for MYSQL_DB, so Django can't create a test database.
+ - echo "GRANT ALL on *.* to '${MYSQL_USER}';"| mysql -u root --password="${MYSQL_ROOT_PASSWORD}" -h mysql
+ # Use python3 explicitly. See https://wiki.ubuntu.com/Python/3
+ - python3 manage.py test
diff --git a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
index 56899614cc6..99fd9870b1d 100644
--- a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
@@ -70,7 +70,7 @@ browser_performance:
reports:
browser_performance: browser-performance.json
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$BROWSER_PERFORMANCE_DISABLED'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.latest.gitlab-ci.yml
index 56899614cc6..99fd9870b1d 100644
--- a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.latest.gitlab-ci.yml
@@ -70,7 +70,7 @@ browser_performance:
reports:
browser_performance: browser-performance.json
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$BROWSER_PERFORMANCE_DISABLED'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml
index 6a3b0cfa9e7..211adc9bd5b 100644
--- a/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml
@@ -3,7 +3,7 @@
# This template is scheduled for removal when testing is complete: https://gitlab.com/gitlab-org/gitlab/-/issues/337987
variables:
- AUTO_BUILD_IMAGE_VERSION: 'v1.3.1'
+ AUTO_BUILD_IMAGE_VERSION: 'v1.5.0'
build:
stage: build
@@ -23,6 +23,9 @@ build:
export CI_APPLICATION_TAG=${CI_APPLICATION_TAG:-$CI_COMMIT_TAG}
fi
- /build/build.sh
+ artifacts:
+ reports:
+ dotenv: gl-auto-build-variables.env
rules:
- if: '$BUILD_DISABLED'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml
index 31ca68c57d7..11f8376f0b4 100644
--- a/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml
@@ -9,6 +9,6 @@ cloud_formation:
rules:
- if: '($AUTO_DEVOPS_PLATFORM_TARGET != "EC2") || ($AUTO_DEVOPS_PLATFORM_TARGET != "ECS")'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
diff --git a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
index 65a58130962..28ac627f103 100644
--- a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
@@ -1,5 +1,5 @@
variables:
- DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.14.0'
+ DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.17.0'
.dast-auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}"
@@ -10,6 +10,7 @@ dast_environment_deploy:
script:
- auto-deploy check_kube_domain
- auto-deploy download_chart
+ - auto-deploy use_kube_context || true
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
@@ -29,7 +30,7 @@ dast_environment_deploy:
- if: $DAST_WEBSITE # we don't need to create a review app if a URL is already given
when: never
- if: $CI_COMMIT_BRANCH &&
- $CI_KUBERNETES_ACTIVE &&
+ ($CI_KUBERNETES_ACTIVE || $KUBECONFIG) &&
$GITLAB_FEATURES =~ /\bdast\b/
stop_dast_environment:
@@ -38,6 +39,7 @@ stop_dast_environment:
variables:
GIT_STRATEGY: none
script:
+ - auto-deploy use_kube_context || true
- auto-deploy initialize_tiller
- auto-deploy delete
environment:
@@ -52,6 +54,6 @@ stop_dast_environment:
- if: $DAST_WEBSITE # we don't need to create a review app if a URL is already given
when: never
- if: $CI_COMMIT_BRANCH &&
- $CI_KUBERNETES_ACTIVE &&
+ ($CI_KUBERNETES_ACTIVE || $KUBECONFIG) &&
$GITLAB_FEATURES =~ /\bdast\b/
when: always
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
index 58f13746a1f..973db26bf2d 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
@@ -1,5 +1,5 @@
variables:
- AUTO_DEPLOY_IMAGE_VERSION: 'v2.14.0'
+ AUTO_DEPLOY_IMAGE_VERSION: 'v2.17.0'
.auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}"
@@ -11,6 +11,7 @@ review:
script:
- auto-deploy check_kube_domain
- auto-deploy download_chart
+ - auto-deploy use_kube_context || true
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
@@ -24,7 +25,7 @@ review:
paths: [environment_url.txt, tiller.log]
when: always
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: never
@@ -38,6 +39,7 @@ stop_review:
variables:
GIT_STRATEGY: none
script:
+ - auto-deploy use_kube_context || true
- auto-deploy initialize_tiller
- auto-deploy delete
environment:
@@ -45,7 +47,7 @@ stop_review:
action: stop
allow_failure: true
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: never
@@ -66,6 +68,7 @@ staging:
script:
- auto-deploy check_kube_domain
- auto-deploy download_chart
+ - auto-deploy use_kube_context || true
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
@@ -74,7 +77,7 @@ staging:
name: staging
url: http://$CI_PROJECT_PATH_SLUG-staging.$KUBE_INGRESS_BASE_DOMAIN
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
@@ -91,6 +94,7 @@ canary:
script:
- auto-deploy check_kube_domain
- auto-deploy download_chart
+ - auto-deploy use_kube_context || true
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
@@ -101,7 +105,7 @@ canary:
rules:
- if: '$CI_DEPLOY_FREEZE != null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
@@ -114,6 +118,7 @@ canary:
script:
- auto-deploy check_kube_domain
- auto-deploy download_chart
+ - auto-deploy use_kube_context || true
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
@@ -132,7 +137,7 @@ production:
rules:
- if: '$CI_DEPLOY_FREEZE != null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$STAGING_ENABLED'
when: never
@@ -150,7 +155,7 @@ production_manual:
rules:
- if: '$CI_DEPLOY_FREEZE != null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$INCREMENTAL_ROLLOUT_ENABLED'
when: never
@@ -168,6 +173,7 @@ production_manual:
script:
- auto-deploy check_kube_domain
- auto-deploy download_chart
+ - auto-deploy use_kube_context || true
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
@@ -188,7 +194,7 @@ production_manual:
rules:
- if: '$CI_DEPLOY_FREEZE != null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$INCREMENTAL_ROLLOUT_MODE == "timed"'
when: never
@@ -203,7 +209,7 @@ production_manual:
rules:
- if: '$CI_DEPLOY_FREEZE != null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$INCREMENTAL_ROLLOUT_MODE == "manual"'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml
index 530ab1d0f99..248040b8b18 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml
@@ -21,7 +21,7 @@ review:
paths: [environment_url.txt, tiller.log]
when: always
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: never
@@ -42,7 +42,7 @@ stop_review:
action: stop
allow_failure: true
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: never
@@ -71,7 +71,7 @@ staging:
name: staging
url: http://$CI_PROJECT_PATH_SLUG-staging.$KUBE_INGRESS_BASE_DOMAIN
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
@@ -96,7 +96,7 @@ canary:
name: production
url: http://$CI_PROJECT_PATH_SLUG.$KUBE_INGRESS_BASE_DOMAIN
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
@@ -125,7 +125,7 @@ canary:
production:
<<: *production_template
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$STAGING_ENABLED'
when: never
@@ -141,7 +141,7 @@ production_manual:
<<: *production_template
allow_failure: false
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$INCREMENTAL_ROLLOUT_ENABLED'
when: never
@@ -177,7 +177,7 @@ production_manual:
resource_group: production
allow_failure: true
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$INCREMENTAL_ROLLOUT_MODE == "timed"'
when: never
@@ -190,7 +190,7 @@ production_manual:
.timed_rollout_template: &timed_rollout_template
<<: *rollout_template
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$INCREMENTAL_ROLLOUT_MODE == "manual"'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml
index 7efbcab221b..ab3bc511cba 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml
@@ -16,7 +16,7 @@ review_ec2:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "EC2"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$REVIEW_DISABLED'
when: never
@@ -32,7 +32,7 @@ production_ec2:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "EC2"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
index 332c58c8695..9bb2ba69d84 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
@@ -42,7 +42,7 @@ review_ecs:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "ECS"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$REVIEW_DISABLED'
when: never
@@ -58,7 +58,7 @@ stop_review_ecs:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "ECS"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$REVIEW_DISABLED'
when: never
@@ -77,7 +77,7 @@ review_fargate:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "FARGATE"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$REVIEW_DISABLED'
when: never
@@ -93,7 +93,7 @@ stop_review_fargate:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "FARGATE"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$REVIEW_DISABLED'
when: never
@@ -107,7 +107,7 @@ production_ecs:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "ECS"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
@@ -118,7 +118,7 @@ production_fargate:
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "FARGATE"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE'
+ - if: '$CI_KUBERNETES_ACTIVE || $KUBECONFIG'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml
index 1ec1aa60d88..d55c126eeb7 100644
--- a/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml
@@ -72,7 +72,7 @@
rules:
- if: '$MIGRATE_HELM_2TO3 != "true"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: never
@@ -89,7 +89,7 @@ review:helm-2to3:cleanup:
rules:
- if: '$MIGRATE_HELM_2TO3 != "true" && $CLEANUP_HELM_2TO3 == null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: never
@@ -104,7 +104,7 @@ review:helm-2to3:cleanup:
rules:
- if: '$MIGRATE_HELM_2TO3 != "true"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
@@ -119,7 +119,7 @@ staging:helm-2to3:cleanup:
rules:
- if: '$MIGRATE_HELM_2TO3 != "true" && $CLEANUP_HELM_2TO3 == null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH'
when: never
@@ -132,7 +132,7 @@ staging:helm-2to3:cleanup:
rules:
- if: '$MIGRATE_HELM_2TO3 != "true"'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: manual
@@ -145,7 +145,7 @@ production:helm-2to3:cleanup:
rules:
- if: '$MIGRATE_HELM_2TO3 != "true" && $CLEANUP_HELM_2TO3 == null'
when: never
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
when: manual
diff --git a/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml
index 9a7c513c25f..8e34388893a 100644
--- a/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml
@@ -23,7 +23,7 @@ load_performance:
reports:
load_performance: load-performance.json
rules:
- - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ - if: '($CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == "") && ($KUBECONFIG == null || $KUBECONFIG == "")'
when: never
- if: '$LOAD_PERFORMANCE_DISABLED'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml
new file mode 100644
index 00000000000..b763705857e
--- /dev/null
+++ b/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml
@@ -0,0 +1,34 @@
+variables:
+ # Setting this variable will affect all Security templates
+ # (SAST, Dependency Scanning, ...)
+ SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
+ SAST_EXCLUDED_PATHS: "spec, test, tests, tmp"
+
+iac-sast:
+ stage: test
+ artifacts:
+ reports:
+ sast: gl-sast-report.json
+ rules:
+ - when: never
+ # `rules` must be overridden explicitly by each child job
+ # see https://gitlab.com/gitlab-org/gitlab/-/issues/218444
+ variables:
+ SEARCH_MAX_DEPTH: 4
+ allow_failure: true
+ script:
+ - /analyzer run
+
+kics-iac-sast:
+ extends: iac-sast
+ image:
+ name: "$SAST_ANALYZER_IMAGE"
+ variables:
+ SAST_ANALYZER_IMAGE_TAG: 0
+ SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/kics:$SAST_ANALYZER_IMAGE_TAG"
+ rules:
+ - if: $SAST_DISABLED
+ when: never
+ - if: $SAST_EXCLUDED_ANALYZERS =~ /kics/
+ when: never
+ - if: $CI_COMMIT_BRANCH
diff --git a/lib/gitlab/ci/templates/Kaniko.gitlab-ci.yml b/lib/gitlab/ci/templates/Kaniko.gitlab-ci.yml
new file mode 100644
index 00000000000..f1b1c20b4e0
--- /dev/null
+++ b/lib/gitlab/ci/templates/Kaniko.gitlab-ci.yml
@@ -0,0 +1,47 @@
+# To contribute improvements to CI/CD templates, please follow the Development guide at:
+# https://docs.gitlab.com/ee/development/cicd/templates.html
+# This specific template is located at:
+# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Kaniko.gitlab-ci.yml
+
+# Build and publish a tag/branch to the GitLab Container Registry using Kaniko and the GitLab Docker executor.
+# Kaniko can build Docker images without using Docker-in-Docker and its permission
+# drawbacks. No additional configuration is required.
+kaniko-build:
+ variables:
+ # Additional options for Kaniko executor.
+ # For more details see https://github.com/GoogleContainerTools/kaniko/blob/master/README.md#additional-flags
+ KANIKO_ARGS: ""
+ stage: build
+ image:
+ # For latest releases see https://github.com/GoogleContainerTools/kaniko/releases
+ # Only debug/*-debug versions of the Kaniko image are known to work within GitLab CI
+ name: gcr.io/kaniko-project/executor:debug
+ entrypoint: [""]
+ script:
+ # Compose docker tag name
+ # Git Branch/Tag to Docker Image Tag Mapping
+ # * Default Branch: main -> latest
+ # * Branch: feature/my-feature -> branch-feature-my-feature
+ # * Tag: v1.0.0/beta2 -> v1.0.0-beta2
+ - |
+ if [ "$CI_COMMIT_REF_NAME" = $CI_DEFAULT_BRANCH ]; then
+ VERSION="latest"
+ elif [ -n "$CI_COMMIT_TAG" ];then
+ NOSLASH=$(echo "$CI_COMMIT_TAG" | tr -s / - )
+ SANITIZED="${NOSLASH//[^a-zA-Z0-9\-\.]/}"
+ VERSION="$SANITIZED"
+ else \
+ NOSLASH=$(echo "$CI_COMMIT_REF_NAME" | tr -s / - )
+ SANITIZED="${NOSLASH//[^a-zA-Z0-9\-]/}"
+ VERSION="branch-$SANITIZED"
+ fi
+ - echo $VERSION
+ - mkdir -p /kaniko/.docker
+ # Write credentials to access the GitLab Container Registry within the runner/CI
+ - echo "{\"auths\":{\"$CI_REGISTRY\":{\"auth\":\"$(echo -n ${CI_REGISTRY_USER}:${CI_REGISTRY_PASSWORD} | base64 | tr -d '\n')\"}}}" > /kaniko/.docker/config.json
+ # Build and push the container. To disable push add --no-push
+ - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CI_REGISTRY_IMAGE:$VERSION $KANIKO_ARGS
+ # Run this job in a branch/tag where a Dockerfile exists
+ rules:
+ - exists:
+ - Dockerfile
diff --git a/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml
index ceeefa8aea6..544774d3b06 100644
--- a/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml
@@ -1,7 +1,7 @@
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/ee/development/cicd/templates.html
# This specific template is located at:
-# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/API-Fuzzing.lastest.gitlab-ci.yml
+# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml
# Read more about this feature here: https://docs.gitlab.com/ee/user/application_security/api_fuzzing/
#
diff --git a/lib/gitlab/ci/templates/Security/Cluster-Image-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Cluster-Image-Scanning.gitlab-ci.yml
index ed4876c2bcc..6b861510eef 100644
--- a/lib/gitlab/ci/templates/Security/Cluster-Image-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Cluster-Image-Scanning.gitlab-ci.yml
@@ -12,7 +12,7 @@
# List of available variables: https://docs.gitlab.com/ee/user/application_security/cluster_image_scanning/#available-variables
variables:
- CIS_ANALYZER_IMAGE: registry.gitlab.com/gitlab-org/security-products/analyzers/cluster-image-scanning:0
+ CIS_ANALYZER_IMAGE: registry.gitlab.com/security-products/cluster-image-scanning:0
cluster_image_scanning:
image: "$CIS_ANALYZER_IMAGE"
diff --git a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
index 0802868d67f..0ecbe5e14b8 100644
--- a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
@@ -51,7 +51,7 @@ dast:
$REVIEW_DISABLED
when: never
- if: $CI_COMMIT_BRANCH &&
- $CI_KUBERNETES_ACTIVE &&
+ ($CI_KUBERNETES_ACTIVE || $KUBECONFIG) &&
$GITLAB_FEATURES =~ /\bdast\b/
- if: $CI_COMMIT_BRANCH &&
$GITLAB_FEATURES =~ /\bdast\b/
diff --git a/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml
index ac7d87a4cda..3d07674c377 100644
--- a/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml
@@ -1,7 +1,7 @@
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/ee/development/cicd/templates.html
# This specific template is located at:
-# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST.lastest.gitlab-ci.yml
+# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml
# To use this template, add the following to your .gitlab-ci.yml file:
#
@@ -52,7 +52,7 @@ dast:
$DAST_API_SPECIFICATION == null
when: never
- if: $CI_COMMIT_BRANCH &&
- $CI_KUBERNETES_ACTIVE &&
+ ($CI_KUBERNETES_ACTIVE || $KUBECONFIG) &&
$GITLAB_FEATURES =~ /\bdast\b/
- if: $CI_COMMIT_BRANCH &&
$DAST_WEBSITE
diff --git a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
index aa7b394a13c..197ce2438e6 100644
--- a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
@@ -74,6 +74,9 @@ gemnasium-maven-dependency_scanning:
# override the analyzer image with a custom value. This may be subject to change or
# breakage across GitLab releases.
DS_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/gemnasium-maven:$DS_MAJOR_VERSION"
+ # Stop reporting Gradle as "maven".
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/338252
+ DS_REPORT_PACKAGE_MANAGER_MAVEN_WHEN_JAVA: "false"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED
when: never
@@ -97,6 +100,9 @@ gemnasium-python-dependency_scanning:
# override the analyzer image with a custom value. This may be subject to change or
# breakage across GitLab releases.
DS_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/gemnasium-python:$DS_MAJOR_VERSION"
+ # Stop reporting Pipenv and Setuptools as "pip".
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/338252
+ DS_REPORT_PACKAGE_MANAGER_PIP_WHEN_PYTHON: "false"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED
when: never
diff --git a/lib/gitlab/ci/templates/Security/SAST-IaC.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/SAST-IaC.latest.gitlab-ci.yml
new file mode 100644
index 00000000000..8c0d72ff282
--- /dev/null
+++ b/lib/gitlab/ci/templates/Security/SAST-IaC.latest.gitlab-ci.yml
@@ -0,0 +1,2 @@
+include:
+ template: Jobs/SAST-IaC.latest.gitlab-ci.yml
diff --git a/lib/gitlab/ci/templates/Terraform.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Terraform.latest.gitlab-ci.yml
index 081a3a6cc78..e554742735c 100644
--- a/lib/gitlab/ci/templates/Terraform.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Terraform.latest.gitlab-ci.yml
@@ -7,20 +7,17 @@ include:
- template: Terraform/Base.latest.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml
stages:
- - init
- validate
- build
- deploy
- - cleanup
-
-init:
- extends: .terraform:init
fmt:
extends: .terraform:fmt
+ needs: []
validate:
extends: .terraform:validate
+ needs: []
build:
extends: .terraform:build
diff --git a/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml
index 3a70e6bc4b8..a0ec07e61e1 100644
--- a/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml
@@ -21,18 +21,11 @@ cache:
paths:
- ${TF_ROOT}/.terraform/
-.terraform:init: &terraform_init
- stage: init
- script:
- - cd ${TF_ROOT}
- - gitlab-terraform init
-
.terraform:fmt: &terraform_fmt
stage: validate
- needs: []
script:
- cd ${TF_ROOT}
- - gitlab-terraform fmt -check -recursive
+ - gitlab-terraform fmt
allow_failure: true
.terraform:validate: &terraform_validate
@@ -60,10 +53,9 @@ cache:
- cd ${TF_ROOT}
- gitlab-terraform apply
resource_group: ${TF_STATE_NAME}
- when: manual
- only:
- variables:
- - $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ rules:
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ when: manual
.terraform:destroy: &terraform_destroy
stage: cleanup
diff --git a/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml b/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml
index 22c40d8a8b8..4f63ff93d4d 100644
--- a/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml
@@ -13,7 +13,7 @@ stages:
a11y:
stage: accessibility
- image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:5.3.0-gitlab.3
+ image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:6.0.1
script: /gitlab-accessibility.sh $a11y_urls
allow_failure: true
artifacts:
diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index 25075cc8f90..7d08f0230fc 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -78,7 +78,7 @@ module Gitlab
end
def archived_trace_exist?
- trace_artifact&.exists?
+ archived?
end
def live_trace_exist?
@@ -156,7 +156,7 @@ module Gitlab
def read_stream
stream = Gitlab::Ci::Trace::Stream.new do
- if trace_artifact
+ if archived?
trace_artifact.open
elsif job.trace_chunks.any?
Gitlab::Ci::Trace::ChunkedIO.new(job)
@@ -174,7 +174,7 @@ module Gitlab
def unsafe_write!(mode, &blk)
stream = Gitlab::Ci::Trace::Stream.new do
- if trace_artifact
+ if archived?
raise AlreadyArchivedError, 'Could not write to the archived trace'
elsif current_path
File.open(current_path, mode)
@@ -195,7 +195,7 @@ module Gitlab
def unsafe_archive!
raise ArchiveError, 'Job is not finished yet' unless job.complete?
- already_archived?.tap do |archived|
+ archived?.tap do |archived|
destroy_any_orphan_trace_data!
raise AlreadyArchivedError, 'Could not archive again' if archived
end
@@ -218,7 +218,7 @@ module Gitlab
end
end
- def already_archived?
+ def archived?
# TODO check checksum to ensure archive completed successfully
# See https://gitlab.com/gitlab-org/gitlab/-/issues/259619
trace_artifact&.archived_trace_exists?
@@ -227,11 +227,12 @@ module Gitlab
def destroy_any_orphan_trace_data!
return unless trace_artifact
- if already_archived?
- # An archive already exists, so make sure to remove the trace chunks
+ if archived?
+ # An archive file exists, so remove the trace chunks
erase_trace_chunks!
else
- # An archive already exists, but its associated file does not, so remove it
+ # A trace artifact record exists with no archive file,
+ # but an archive was attempted, so clean up the associated record
trace_artifact.destroy!
end
end
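Editorial note: a small usage sketch of the rename above, assuming `job` is a finished CI build (the constructor argument is not shown in this hunk).

  trace = Gitlab::Ci::Trace.new(job)
  # archived_trace_exist? now checks that the archive file really exists
  # (trace_artifact&.archived_trace_exists?) rather than only that a
  # ci_job_artifacts row is present.
  trace.archived_trace_exist?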
diff --git a/lib/gitlab/ci/trace/archive.rb b/lib/gitlab/ci/trace/archive.rb
index 5047cf04562..d4a451ca526 100644
--- a/lib/gitlab/ci/trace/archive.rb
+++ b/lib/gitlab/ci/trace/archive.rb
@@ -62,7 +62,7 @@ module Gitlab
trace_metadata.update!(remote_checksum: remote_checksum)
unless trace_metadata.remote_checksum_valid?
- metrics.increment_error_counter(type: :archive_invalid_checksum)
+ metrics.increment_error_counter(error_reason: :archive_invalid_checksum)
end
end
diff --git a/lib/gitlab/ci/trace/metrics.rb b/lib/gitlab/ci/trace/metrics.rb
index 174a5f184ff..f3ded3cda4a 100644
--- a/lib/gitlab/ci/trace/metrics.rb
+++ b/lib/gitlab/ci/trace/metrics.rb
@@ -21,7 +21,7 @@ module Gitlab
:corrupted # malformed trace found after comparing CRC32 and size
].freeze
- TRACE_ERROR_TYPES = [
+ TRACE_ERROR_REASONS = [
:chunks_invalid_size, # used to be :corrupted
:chunks_invalid_checksum, # used to be :invalid
:archive_invalid_checksum # malformed trace found into object store after comparing MD5
@@ -39,12 +39,12 @@ module Gitlab
self.class.trace_bytes.increment({}, size.to_i)
end
- def increment_error_counter(type: :unknown)
- unless TRACE_ERROR_TYPES.include?(type)
- raise ArgumentError, "unknown error type: #{type}"
+ def increment_error_counter(error_reason: :unknown)
+ unless TRACE_ERROR_REASONS.include?(error_reason)
+ raise ArgumentError, "unknown error reason: #{error_reason}"
end
- self.class.trace_errors_counter.increment(type: type)
+ self.class.trace_errors_counter.increment(error_reason: error_reason)
end
def observe_migration_duration(seconds)
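Editorial note: sketch of the renamed counter API above; the metric label is now error_reason instead of type.

  metrics = Gitlab::Ci::Trace::Metrics.new
  metrics.increment_error_counter(error_reason: :archive_invalid_checksum)
  # Unknown reasons are rejected:
  # metrics.increment_error_counter(error_reason: :bogus) # => ArgumentError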
diff --git a/lib/gitlab/ci/variables/builder.rb b/lib/gitlab/ci/variables/builder.rb
new file mode 100644
index 00000000000..f4c5a06af97
--- /dev/null
+++ b/lib/gitlab/ci/variables/builder.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Variables
+ class Builder
+ include ::Gitlab::Utils::StrongMemoize
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ end
+
+ def scoped_variables(job, environment:, dependencies:)
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ variables.concat(predefined_variables(job)) if pipeline.predefined_vars_in_builder_enabled?
+ end
+ end
+
+ private
+
+ attr_reader :pipeline
+
+ def predefined_variables(job)
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ variables.append(key: 'CI_JOB_NAME', value: job.name)
+ variables.append(key: 'CI_JOB_STAGE', value: job.stage)
+ variables.append(key: 'CI_JOB_MANUAL', value: 'true') if job.action?
+ variables.append(key: 'CI_PIPELINE_TRIGGERED', value: 'true') if job.trigger_request
+
+ variables.append(key: 'CI_NODE_INDEX', value: job.options[:instance].to_s) if job.options&.include?(:instance)
+ variables.append(key: 'CI_NODE_TOTAL', value: ci_node_total_value(job).to_s)
+
+ # legacy variables
+ variables.append(key: 'CI_BUILD_NAME', value: job.name)
+ variables.append(key: 'CI_BUILD_STAGE', value: job.stage)
+ variables.append(key: 'CI_BUILD_TRIGGERED', value: 'true') if job.trigger_request
+ variables.append(key: 'CI_BUILD_MANUAL', value: 'true') if job.action?
+ end
+ end
+
+ def ci_node_total_value(job)
+ parallel = job.options&.dig(:parallel)
+ parallel = parallel.dig(:total) if parallel.is_a?(Hash)
+ parallel || 1
+ end
+ end
+ end
+ end
+end
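Editorial note: a hypothetical usage sketch of the new builder; the pipeline/job objects and the feature-flag helper are assumed, not shown here.

  builder   = Gitlab::Ci::Variables::Builder.new(pipeline)
  variables = builder.scoped_variables(job, environment: 'production', dependencies: true)
  # When pipeline.predefined_vars_in_builder_enabled? is true, the collection
  # includes CI_JOB_NAME, CI_JOB_STAGE, CI_NODE_TOTAL and the legacy CI_BUILD_* keys.
  variables.to_hash['CI_JOB_NAME']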
diff --git a/lib/gitlab/ci/variables/collection.rb b/lib/gitlab/ci/variables/collection.rb
index 09c75a2b3f1..a00c1da97ea 100644
--- a/lib/gitlab/ci/variables/collection.rb
+++ b/lib/gitlab/ci/variables/collection.rb
@@ -89,9 +89,7 @@ module Gitlab
end
end
- def sort_and_expand_all(project, keep_undefined: false)
- return self if Feature.disabled?(:variable_inside_variable, project, default_enabled: :yaml)
-
+ def sort_and_expand_all(keep_undefined: false)
sorted = Sort.new(self)
return self.class.new(self, sorted.errors) unless sorted.valid?
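Editorial note: sketch of the simplified call, with illustrative values; Collection#append and #to_hash are used as elsewhere in GitLab, and the exact expansion output is assumed.

  collection = Gitlab::Ci::Variables::Collection.new
  collection.append(key: 'REGISTRY', value: 'registry.example.com')
  collection.append(key: 'IMAGE', value: '$REGISTRY/app')

  # No project argument or feature-flag check anymore:
  expanded = collection.sort_and_expand_all(keep_undefined: false)
  expanded.to_hash['IMAGE'] # => "registry.example.com/app" (assumed)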
diff --git a/lib/gitlab/ci/yaml_processor/result.rb b/lib/gitlab/ci/yaml_processor/result.rb
index a97c7050fbb..6215ba40ebe 100644
--- a/lib/gitlab/ci/yaml_processor/result.rb
+++ b/lib/gitlab/ci/yaml_processor/result.rb
@@ -80,7 +80,6 @@ module Gitlab
cache: job[:cache],
resource_group_key: job[:resource_group],
scheduling_type: job[:scheduling_type],
- secrets: job[:secrets],
options: {
image: job[:image],
services: job[:services],
diff --git a/lib/gitlab/config_checker/external_database_checker.rb b/lib/gitlab/config_checker/external_database_checker.rb
index a56f2413615..54320b7ff9a 100644
--- a/lib/gitlab/config_checker/external_database_checker.rb
+++ b/lib/gitlab/config_checker/external_database_checker.rb
@@ -6,7 +6,7 @@ module Gitlab
extend self
def check
- return [] if Gitlab::Database.main.postgresql_minimum_supported_version?
+ return [] if ApplicationRecord.database.postgresql_minimum_supported_version?
[
{
@@ -15,7 +15,7 @@ module Gitlab
'%{pg_version_minimum} is required for this version of GitLab. ' \
'Please upgrade your environment to a supported PostgreSQL version, ' \
'see %{pg_requirements_url} for details.') % {
- pg_version_current: Gitlab::Database.main.version,
+ pg_version_current: ApplicationRecord.database.version,
pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION,
pg_requirements_url: '<a href="https://docs.gitlab.com/ee/install/requirements.html#database">database requirements</a>'
}
diff --git a/lib/gitlab/container_repository/tags/cache.rb b/lib/gitlab/container_repository/tags/cache.rb
new file mode 100644
index 00000000000..ff457fb9219
--- /dev/null
+++ b/lib/gitlab/container_repository/tags/cache.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ContainerRepository
+ module Tags
+ class Cache
+ def initialize(container_repository)
+ @container_repository = container_repository
+ @cached_tag_names = Set.new
+ end
+
+ def populate(tags)
+ return if tags.empty?
+
+ # This will load all tags in one Redis roundtrip.
+ # The maximum number of tags is configurable and is set to 200 by default.
+ # https://gitlab.com/gitlab-org/gitlab/blob/master/doc/user/packages/container_registry/index.md#set-cleanup-limits-to-conserve-resources
+ keys = tags.map(&method(:cache_key))
+ cached_tags_count = 0
+
+ ::Gitlab::Redis::Cache.with do |redis|
+ tags.zip(redis.mget(keys)).each do |tag, created_at|
+ next unless created_at
+
+ tag.created_at = DateTime.rfc3339(created_at)
+ @cached_tag_names << tag.name
+ cached_tags_count += 1
+ end
+ end
+
+ cached_tags_count
+ end
+
+ def insert(tags, max_ttl_in_seconds)
+ return unless max_ttl_in_seconds
+ return if tags.empty?
+
+ # tags with nil created_at are not cacheable
+ # tags already cached don't need to be cached again
+ cacheable_tags = tags.select do |tag|
+ tag.created_at.present? && !tag.name.in?(@cached_tag_names)
+ end
+
+ return if cacheable_tags.empty?
+
+ now = Time.zone.now
+
+ ::Gitlab::Redis::Cache.with do |redis|
+ # We use a pipeline instead of an MSET because each tag has
+ # a specific TTL.
+ redis.pipelined do
+ cacheable_tags.each do |tag|
+ created_at = tag.created_at
+ # ttl is the max_ttl_in_seconds reduced by the number
+ # of seconds that the tag has already existed
+ ttl = max_ttl_in_seconds - (now - created_at).seconds
+ ttl = ttl.to_i
+ redis.set(cache_key(tag), created_at.rfc3339, ex: ttl) if ttl > 0
+ end
+ end
+ end
+ end
+
+ private
+
+ def cache_key(tag)
+ "container_repository:{#{@container_repository.id}}:tag:#{tag.name}:created_at"
+ end
+ end
+ end
+ end
+end
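Editorial note: illustrative usage of the new cache; repository and tags stand in for a ContainerRepository and its tag objects with name/created_at accessors.

  cache = Gitlab::ContainerRepository::Tags::Cache.new(repository)
  cache.populate(tags)              # fills tag.created_at from Redis, returns the hit count
  cache.insert(tags, 24.hours.to_i) # caches the remaining tags, each with a bounded TTL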
diff --git a/lib/gitlab/content_security_policy/config_loader.rb b/lib/gitlab/content_security_policy/config_loader.rb
index 0e3fa8b8d87..bdae59e7e3c 100644
--- a/lib/gitlab/content_security_policy/config_loader.rb
+++ b/lib/gitlab/content_security_policy/config_loader.rb
@@ -19,30 +19,42 @@ module Gitlab
'font_src' => "'self'",
'form_action' => "'self' https: http:",
'frame_ancestors' => "'self'",
- 'frame_src' => "'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com",
+ 'frame_src' => ContentSecurityPolicy::Directives.frame_src,
'img_src' => "'self' data: blob: http: https:",
'manifest_src' => "'self'",
'media_src' => "'self'",
- 'script_src' => "'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.google.com/recaptcha/ https://www.recaptcha.net https://apis.google.com",
+ 'script_src' => ContentSecurityPolicy::Directives.script_src,
'style_src' => "'self' 'unsafe-inline'",
- 'worker_src' => "'self' blob: data:",
+ 'worker_src' => "#{Gitlab::Utils.append_path(Gitlab.config.gitlab.url, 'assets/')} blob: data:",
'object_src' => "'none'",
'report_uri' => nil
}
- # frame-src was deprecated in CSP level 2 in favor of child-src
- # CSP level 3 "undeprecated" frame-src and browsers fall back on child-src if it's missing
- # However Safari seems to read child-src first so we'll just keep both equal
- directives['child_src'] = directives['frame_src']
-
# connect_src with 'self' includes https/wss variations of the origin,
# however, safari hasn't covered this yet and we need to explicitly add
# support for websocket origins until Safari catches up with the specs
+ if Rails.env.development?
+ allow_webpack_dev_server(directives)
+ allow_letter_opener(directives)
+ allow_customersdot(directives) if ENV['CUSTOMER_PORTAL_URL'].present?
+ end
+
allow_websocket_connections(directives)
- allow_webpack_dev_server(directives) if Rails.env.development?
allow_cdn(directives, Settings.gitlab.cdn_host) if Settings.gitlab.cdn_host.present?
- allow_customersdot(directives) if Rails.env.development? && ENV['CUSTOMER_PORTAL_URL'].present?
allow_sentry(directives) if Gitlab.config.sentry&.enabled && Gitlab.config.sentry&.clientside_dsn
+ allow_framed_gitlab_paths(directives)
+
+ # The following section contains workarounds to patch Safari's lack of support for CSP Level 3
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/343579
+ # frame-src was deprecated in CSP level 2 in favor of child-src
+ # CSP level 3 "undeprecated" frame-src and browsers fall back on child-src if it's missing
+ # However Safari seems to read child-src first so we'll just keep both equal
+ append_to_directive(directives, 'child_src', directives['frame_src'])
+
+ # Safari also doesn't support worker-src and only checks child-src
+ # So for compatibility until it catches up to other browsers we need to
+ # append worker-src's content to child-src
+ append_to_directive(directives, 'child_src', directives['worker_src'])
directives
end
@@ -100,6 +112,8 @@ module Gitlab
append_to_directive(directives, 'script_src', cdn_host)
append_to_directive(directives, 'style_src', cdn_host)
append_to_directive(directives, 'font_src', cdn_host)
+ append_to_directive(directives, 'worker_src', cdn_host)
+ append_to_directive(directives, 'frame_src', cdn_host)
end
def self.append_to_directive(directives, directive, text)
@@ -119,6 +133,21 @@ module Gitlab
append_to_directive(directives, 'connect_src', sentry_uri.to_s)
end
+
+ def self.allow_letter_opener(directives)
+ append_to_directive(directives, 'frame_src', Gitlab::Utils.append_path(Gitlab.config.gitlab.url, '/rails/letter_opener/'))
+ end
+
+ # Using 'self' in the CSP introduces several CSP bypass opportunities,
+ # so we list the URLs where GitLab frames itself instead.
+ def self.allow_framed_gitlab_paths(directives)
+ # We need the version without trailing / for the sidekiq page itself
+ # and we also need the version with trailing / for "deeper" pages
+ # like /admin/sidekiq/busy
+ ['/admin/sidekiq', '/admin/sidekiq/', '/-/speedscope/index.html'].map do |path|
+ append_to_directive(directives, 'frame_src', Gitlab::Utils.append_path(Gitlab.config.gitlab.url, path))
+ end
+ end
end
end
end
diff --git a/lib/gitlab/content_security_policy/directives.rb b/lib/gitlab/content_security_policy/directives.rb
new file mode 100644
index 00000000000..30f3c16247d
--- /dev/null
+++ b/lib/gitlab/content_security_policy/directives.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# This module returns various SaaS-related
+# ContentSecurityPolicy directive sources, which may be
+# overridden in other variants of GitLab.
+
+module Gitlab
+ module ContentSecurityPolicy
+ module Directives
+ def self.frame_src
+ "https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com"
+ end
+
+ def self.script_src
+ "'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.google.com/recaptcha/ https://www.recaptcha.net https://apis.google.com"
+ end
+ end
+ end
+end
+
+Gitlab::ContentSecurityPolicy::Directives.prepend_mod
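Editorial note: sketch of how the loader above consumes these defaults; both methods return space-separated source lists suitable for a CSP directive value, and prepend_mod lets other GitLab editions override them.

  frame_sources  = Gitlab::ContentSecurityPolicy::Directives.frame_src
  script_sources = Gitlab::ContentSecurityPolicy::Directives.script_src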
diff --git a/lib/gitlab/contributions_calendar.rb b/lib/gitlab/contributions_calendar.rb
index 7d7c604d86a..deaaab953aa 100644
--- a/lib/gitlab/contributions_calendar.rb
+++ b/lib/gitlab/contributions_calendar.rb
@@ -2,12 +2,15 @@
module Gitlab
class ContributionsCalendar
+ include TimeZoneHelper
+
attr_reader :contributor
attr_reader :current_user
attr_reader :projects
def initialize(contributor, current_user = nil)
@contributor = contributor
+ @contributor_time_instance = local_time_instance(contributor.timezone)
@current_user = current_user
@projects = if @contributor.include_private_contributions?
ContributedProjectsFinder.new(@contributor).execute(@contributor)
@@ -22,7 +25,7 @@ module Gitlab
# Can't use Event.contributions here because we need to check 3 different
# project_features for the (currently) 3 different contribution types
- date_from = 1.year.ago
+ date_from = @contributor_time_instance.now.years_ago(1)
repo_events = event_counts(date_from, :repository)
.having(action: :pushed)
issue_events = event_counts(date_from, :issues)
@@ -47,19 +50,21 @@ module Gitlab
def events_by_date(date)
return Event.none unless can_read_cross_project?
+ date_in_time_zone = date.in_time_zone(@contributor_time_instance)
+
Event.contributions.where(author_id: contributor.id)
- .where(created_at: date.beginning_of_day..date.end_of_day)
+ .where(created_at: date_in_time_zone.beginning_of_day..date_in_time_zone.end_of_day)
.where(project_id: projects)
.with_associations
end
# rubocop: enable CodeReuse/ActiveRecord
def starting_year
- 1.year.ago.year
+ @contributor_time_instance.now.years_ago(1).year
end
def starting_month
- Date.current.month
+ @contributor_time_instance.today.month
end
private
@@ -82,10 +87,10 @@ module Gitlab
.select(:id)
conditions = t[:created_at].gteq(date_from.beginning_of_day)
- .and(t[:created_at].lteq(Date.current.end_of_day))
+ .and(t[:created_at].lteq(@contributor_time_instance.today.end_of_day))
.and(t[:author_id].eq(contributor.id))
- date_interval = "INTERVAL '#{Time.zone.now.utc_offset} seconds'"
+ date_interval = "INTERVAL '#{@contributor_time_instance.now.utc_offset} seconds'"
Event.reorder(nil)
.select(t[:project_id], t[:target_type], t[:action], "date(created_at + #{date_interval}) AS date", 'count(id) as total_amount')
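Editorial note: a rough sketch of the timezone-aware window introduced above; contributor and date are assumed to exist, and local_time_instance comes from TimeZoneHelper.

  tz    = local_time_instance(contributor.timezone) # falls back to the app zone when blank (assumed)
  from  = tz.now.years_ago(1)                       # start of the contribution window
  day   = date.in_time_zone(tz)
  range = day.beginning_of_day..day.end_of_day      # bounds computed in the contributor's zone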
diff --git a/lib/gitlab/current_settings.rb b/lib/gitlab/current_settings.rb
index bfe3f06a56b..b9034cff447 100644
--- a/lib/gitlab/current_settings.rb
+++ b/lib/gitlab/current_settings.rb
@@ -85,7 +85,7 @@ module Gitlab
active_db_connection = ActiveRecord::Base.connection.active? rescue false
active_db_connection &&
- Gitlab::Database.main.cached_table_exists?('application_settings')
+ ApplicationSetting.database.cached_table_exists?
rescue ActiveRecord::NoDatabaseError
false
end
diff --git a/lib/gitlab/cycle_analytics/stage_summary.rb b/lib/gitlab/cycle_analytics/stage_summary.rb
index b309802f296..fdbf068303f 100644
--- a/lib/gitlab/cycle_analytics/stage_summary.rb
+++ b/lib/gitlab/cycle_analytics/stage_summary.rb
@@ -38,7 +38,8 @@ module Gitlab
serialize(
Summary::DeploymentFrequency.new(
deployments: deployments_summary.value.raw_value,
- options: @options),
+ options: @options,
+ project: @project),
with_unit: true
)
end
diff --git a/lib/gitlab/cycle_analytics/summary/base.rb b/lib/gitlab/cycle_analytics/summary/base.rb
index e30e526f017..f2ff86a40a2 100644
--- a/lib/gitlab/cycle_analytics/summary/base.rb
+++ b/lib/gitlab/cycle_analytics/summary/base.rb
@@ -17,6 +17,10 @@ module Gitlab
raise NotImplementedError, "Expected #{self.name} to implement value"
end
+ def links
+ []
+ end
+
private
attr_reader :project, :options
diff --git a/lib/gitlab/cycle_analytics/summary/deployment_frequency.rb b/lib/gitlab/cycle_analytics/summary/deployment_frequency.rb
index 1947866d772..2b1529bdc1a 100644
--- a/lib/gitlab/cycle_analytics/summary/deployment_frequency.rb
+++ b/lib/gitlab/cycle_analytics/summary/deployment_frequency.rb
@@ -6,7 +6,7 @@ module Gitlab
class DeploymentFrequency < Base
include SummaryHelper
- def initialize(deployments:, options:, project: nil)
+ def initialize(deployments:, options:, project:)
@deployments = deployments
super(project: project, options: options)
@@ -23,6 +23,13 @@ module Gitlab
def unit
_('per day')
end
+
+ def links
+ [
+ { "name" => _('Deployment frequency'), "url" => Gitlab::Routing.url_helpers.charts_project_pipelines_path(project, chart: 'deployment-frequency'), "label" => s_('ValueStreamAnalytics|Dashboard') },
+ { "name" => _('Deployment frequency'), "url" => Gitlab::Routing.url_helpers.help_page_path('user/analytics/index', anchor: 'definitions'), "docs_link" => true, "label" => s_('ValueStreamAnalytics|Go to docs') }
+ ]
+ end
end
end
end
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index b560d4cbca8..9c74e5d2ca8 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -59,19 +59,8 @@ module Gitlab
# that inherit from ActiveRecord::Base; not just our own models that
# inherit from ApplicationRecord.
main: ::ActiveRecord::Base,
- ci: ::Ci::CiDatabaseRecord.connection_class? ? ::Ci::CiDatabaseRecord : nil
- }.compact.freeze
- end
-
- def self.databases
- @databases ||= database_base_models
- .transform_values { |connection_class| Connection.new(connection_class) }
- .with_indifferent_access
- .freeze
- end
-
- def self.main
- databases[PRIMARY_DATABASE_NAME]
+ ci: ::Ci::ApplicationRecord.connection_class? ? ::Ci::ApplicationRecord : nil
+ }.compact.with_indifferent_access.freeze
end
# We configure the database connection pool size automatically based on the
@@ -110,8 +99,10 @@ module Gitlab
def self.check_postgres_version_and_print_warning
return if Gitlab::Runtime.rails_runner?
- databases.each do |name, connection|
- next if connection.postgresql_minimum_supported_version?
+ database_base_models.each do |name, model|
+ database = Gitlab::Database::Reflection.new(model)
+
+ next if database.postgresql_minimum_supported_version?
Kernel.warn ERB.new(Rainbow.new.wrap(<<~EOS).red).result
@@ -122,7 +113,7 @@ module Gitlab
 ███ ███  ██  ██ ██  ██ ██   ████ ██ ██   ████  ██████  
******************************************************************************
- You are using PostgreSQL #{connection.version} for the #{name} database, but PostgreSQL >= <%= Gitlab::Database::MINIMUM_POSTGRES_VERSION %>
+ You are using PostgreSQL #{database.version} for the #{name} database, but PostgreSQL >= <%= Gitlab::Database::MINIMUM_POSTGRES_VERSION %>
is required for this version of GitLab.
<% if Rails.env.development? || Rails.env.test? %>
If using gitlab-development-kit, please find the relevant steps here:
@@ -177,18 +168,6 @@ module Gitlab
yield
end
- # This method will allow cross database modifications within the block
- # Example:
- #
- # allow_cross_database_modification_within_transaction(url: 'url-to-an-issue') do
- # create(:build) # inserts ci_build and project record in one transaction
- # end
- def self.allow_cross_database_modification_within_transaction(url:)
- # this method will be overridden in:
- # spec/support/database/cross_database_modification_check.rb
- yield
- end
-
def self.add_post_migrate_path_to_rails(force: false)
return if ENV['SKIP_POST_DEPLOYMENT_MIGRATIONS'] && !force
@@ -263,14 +242,28 @@ module Gitlab
# A patch over ActiveRecord::Base.transaction that provides
# observability into transactional methods.
def transaction(**options, &block)
- if options[:requires_new] && connection.transaction_open?
- ::Gitlab::Database::Metrics.subtransactions_increment(self.name)
- end
+ transaction_type = get_transaction_type(connection.transaction_open?, options[:requires_new])
+
+ ::Gitlab::Database::Metrics.subtransactions_increment(self.name) if transaction_type == :sub_transaction
+
+ payload = { connection: connection, transaction_type: transaction_type }
- ActiveSupport::Notifications.instrument('transaction.active_record', { connection: connection }) do
+ ActiveSupport::Notifications.instrument('transaction.active_record', payload) do
super(**options, &block)
end
end
+
+ private
+
+ def get_transaction_type(transaction_open, requires_new_flag)
+ if transaction_open
+ return :sub_transaction if requires_new_flag
+
+ return :fake_transaction
+ end
+
+ :real_transaction
+ end
end
end
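Editorial note: sketch of the classification above, assuming the patch is prepended to ActiveRecord::Base as before.

  ApplicationRecord.transaction do                    # nothing open yet -> :real_transaction
    ApplicationRecord.transaction(requires_new: true) do
      # SAVEPOINT -> :sub_transaction (counted in the subtransaction metric)
    end
    ApplicationRecord.transaction do
      # joins the surrounding transaction -> :fake_transaction
    end
  end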
diff --git a/lib/gitlab/database/as_with_materialized.rb b/lib/gitlab/database/as_with_materialized.rb
index 07809c5b592..a04ea97117d 100644
--- a/lib/gitlab/database/as_with_materialized.rb
+++ b/lib/gitlab/database/as_with_materialized.rb
@@ -19,7 +19,7 @@ module Gitlab
# Note: to be deleted after the minimum PG version is set to 12.0
def self.materialized_supported?
strong_memoize(:materialized_supported) do
- Gitlab::Database.main.version.match?(/^1[2-9]\./) # version 12.x and above
+ ApplicationRecord.database.version.match?(/^1[2-9]\./) # version 12.x and above
end
end
diff --git a/lib/gitlab/database/async_indexes/index_creator.rb b/lib/gitlab/database/async_indexes/index_creator.rb
index 00de79ec970..994a1deba57 100644
--- a/lib/gitlab/database/async_indexes/index_creator.rb
+++ b/lib/gitlab/database/async_indexes/index_creator.rb
@@ -40,7 +40,7 @@ module Gitlab
end
def connection
- @connection ||= ApplicationRecord.connection
+ @connection ||= async_index.connection
end
def lease_timeout
diff --git a/lib/gitlab/database/async_indexes/postgres_async_index.rb b/lib/gitlab/database/async_indexes/postgres_async_index.rb
index 236459e6216..6cb40729061 100644
--- a/lib/gitlab/database/async_indexes/postgres_async_index.rb
+++ b/lib/gitlab/database/async_indexes/postgres_async_index.rb
@@ -3,7 +3,7 @@
module Gitlab
module Database
module AsyncIndexes
- class PostgresAsyncIndex < ApplicationRecord
+ class PostgresAsyncIndex < SharedModel
self.table_name = 'postgres_async_indexes'
MAX_IDENTIFIER_LENGTH = Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
diff --git a/lib/gitlab/database/background_migration_job.rb b/lib/gitlab/database/background_migration_job.rb
index 1121793917b..c046571a111 100644
--- a/lib/gitlab/database/background_migration_job.rb
+++ b/lib/gitlab/database/background_migration_job.rb
@@ -4,6 +4,7 @@ module Gitlab
module Database
class BackgroundMigrationJob < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord
include EachBatch
+ include BulkInsertSafe
self.table_name = :background_migration_jobs
diff --git a/lib/gitlab/database/batch_counter.rb b/lib/gitlab/database/batch_counter.rb
index 7efa5b46ecb..6c0ce9e481a 100644
--- a/lib/gitlab/database/batch_counter.rb
+++ b/lib/gitlab/database/batch_counter.rb
@@ -31,7 +31,7 @@ module Gitlab
end
def count(batch_size: nil, mode: :itself, start: nil, finish: nil)
- raise 'BatchCount can not be run inside a transaction' if @relation.connection.transaction_open?
+ raise 'BatchCount can not be run inside a transaction' if transaction_open?
check_mode!(mode)
@@ -87,6 +87,10 @@ module Gitlab
results
end
+ def transaction_open?
+ @relation.connection.transaction_open?
+ end
+
def merge_results(results, object)
return object unless results
diff --git a/lib/gitlab/database/connection.rb b/lib/gitlab/database/connection.rb
deleted file mode 100644
index cda6220ee6c..00000000000
--- a/lib/gitlab/database/connection.rb
+++ /dev/null
@@ -1,260 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Database
- # Configuration settings and methods for interacting with a PostgreSQL
- # database, with support for multiple databases.
- class Connection
- attr_reader :scope
-
- # Initializes a new `Database`.
- #
- # The `scope` argument must be an object (such as `ActiveRecord::Base`)
- # that supports retrieving connections and connection pools.
- def initialize(scope = ActiveRecord::Base)
- @config = nil
- @scope = scope
- @version = nil
- @open_transactions_baseline = 0
- end
-
- def config
- # The result of this method must not be cached, as other methods may use
- # it after making configuration changes and expect those changes to be
- # present. For example, `disable_prepared_statements` expects the
- # configuration settings to always be up to date.
- #
- # See the following for more information:
- #
- # - https://gitlab.com/gitlab-org/release/retrospectives/-/issues/39
- # - https://gitlab.com/gitlab-com/gl-infra/production/-/issues/5238
- scope.connection_db_config.configuration_hash.with_indifferent_access
- end
-
- def pool_size
- config[:pool] || Database.default_pool_size
- end
-
- def username
- config[:username] || ENV['USER']
- end
-
- def database_name
- config[:database]
- end
-
- def adapter_name
- config[:adapter]
- end
-
- def human_adapter_name
- if postgresql?
- 'PostgreSQL'
- else
- 'Unknown'
- end
- end
-
- def postgresql?
- adapter_name.casecmp('postgresql') == 0
- end
-
- def db_config_with_default_pool_size
- db_config_object = scope.connection_db_config
- config = db_config_object
- .configuration_hash
- .merge(pool: Database.default_pool_size)
-
- ActiveRecord::DatabaseConfigurations::HashConfig.new(
- db_config_object.env_name,
- db_config_object.name,
- config
- )
- end
-
- # Disables prepared statements for the current database connection.
- def disable_prepared_statements
- db_config_object = scope.connection_db_config
- config = db_config_object.configuration_hash.merge(prepared_statements: false)
-
- hash_config = ActiveRecord::DatabaseConfigurations::HashConfig.new(
- db_config_object.env_name,
- db_config_object.name,
- config
- )
-
- scope.establish_connection(hash_config)
- end
-
- # Check whether the underlying database is in read-only mode
- def db_read_only?
- pg_is_in_recovery =
- scope
- .connection
- .execute('SELECT pg_is_in_recovery()')
- .first
- .fetch('pg_is_in_recovery')
-
- Gitlab::Utils.to_boolean(pg_is_in_recovery)
- end
-
- def db_read_write?
- !db_read_only?
- end
-
- def version
- @version ||= database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
- end
-
- def database_version
- connection.execute("SELECT VERSION()").first['version']
- end
-
- def postgresql_minimum_supported_version?
- version.to_f >= MINIMUM_POSTGRES_VERSION
- end
-
- # Bulk inserts a number of rows into a table, optionally returning their
- # IDs.
- #
- # table - The name of the table to insert the rows into.
- # rows - An Array of Hash instances, each mapping the columns to their
- # values.
- # return_ids - When set to true the return value will be an Array of IDs of
- # the inserted rows
- # disable_quote - A key or an Array of keys to exclude from quoting (You
- # become responsible for protection from SQL injection for
- # these keys!)
- # on_conflict - Defines an upsert. Values can be: :disabled (default) or
- # :do_nothing
- def bulk_insert(table, rows, return_ids: false, disable_quote: [], on_conflict: nil)
- return if rows.empty?
-
- keys = rows.first.keys
- columns = keys.map { |key| connection.quote_column_name(key) }
-
- disable_quote = Array(disable_quote).to_set
- tuples = rows.map do |row|
- keys.map do |k|
- disable_quote.include?(k) ? row[k] : connection.quote(row[k])
- end
- end
-
- sql = <<-EOF
- INSERT INTO #{table} (#{columns.join(', ')})
- VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
- EOF
-
- sql = "#{sql} ON CONFLICT DO NOTHING" if on_conflict == :do_nothing
-
- sql = "#{sql} RETURNING id" if return_ids
-
- result = connection.execute(sql)
-
- if return_ids
- result.values.map { |tuple| tuple[0].to_i }
- else
- []
- end
- end
-
- def cached_column_exists?(table_name, column_name)
- connection
- .schema_cache.columns_hash(table_name)
- .has_key?(column_name.to_s)
- end
-
- def cached_table_exists?(table_name)
- exists? && connection.schema_cache.data_source_exists?(table_name)
- end
-
- def exists?
- # We can't _just_ check if `connection` raises an error, as it will
- # point to a `ConnectionProxy`, and obtaining those doesn't involve any
- # database queries. So instead we obtain the database version, which is
- # cached after the first call.
- connection.schema_cache.database_version
- true
- rescue StandardError
- false
- end
-
- def system_id
- row = connection
- .execute('SELECT system_identifier FROM pg_control_system()')
- .first
-
- row['system_identifier']
- end
-
- def pg_wal_lsn_diff(location1, location2)
- lsn1 = connection.quote(location1)
- lsn2 = connection.quote(location2)
-
- query = <<-SQL.squish
- SELECT pg_wal_lsn_diff(#{lsn1}, #{lsn2})
- AS result
- SQL
-
- row = connection.select_all(query).first
- row['result'] if row
- end
-
- # @param [ActiveRecord::Connection] ar_connection
- # @return [String]
- def get_write_location(ar_connection)
- use_new_load_balancer_query = Gitlab::Utils
- .to_boolean(ENV['USE_NEW_LOAD_BALANCER_QUERY'], default: true)
-
- sql =
- if use_new_load_balancer_query
- <<~NEWSQL
- SELECT CASE
- WHEN pg_is_in_recovery() = true AND EXISTS (SELECT 1 FROM pg_stat_get_wal_senders())
- THEN pg_last_wal_replay_lsn()::text
- WHEN pg_is_in_recovery() = false
- THEN pg_current_wal_insert_lsn()::text
- ELSE NULL
- END AS location;
- NEWSQL
- else
- <<~SQL
- SELECT pg_current_wal_insert_lsn()::text AS location
- SQL
- end
-
- row = ar_connection.select_all(sql).first
- row['location'] if row
- end
-
- # inside_transaction? will return true if the caller is running within a
- # transaction. Handles special cases when running inside a test
- # environment, where tests may be wrapped in transactions
- def inside_transaction?
- base = Rails.env.test? ? @open_transactions_baseline : 0
-
- scope.connection.open_transactions > base
- end
-
- # These methods that access @open_transactions_baseline are not
- # thread-safe. These are fine though because we only call these in
- # RSpec's main thread. If we decide to run specs multi-threaded, we would
- # need to use something like ThreadGroup to keep track of this value
- def set_open_transactions_baseline
- @open_transactions_baseline = scope.connection.open_transactions
- end
-
- def reset_open_transactions_baseline
- @open_transactions_baseline = 0
- end
-
- private
-
- def connection
- scope.connection
- end
- end
- end
-end
-
-Gitlab::Database::Connection.prepend_mod_with('Gitlab::Database::Connection')
diff --git a/lib/gitlab/database/each_database.rb b/lib/gitlab/database/each_database.rb
new file mode 100644
index 00000000000..7c9e65e6691
--- /dev/null
+++ b/lib/gitlab/database/each_database.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module EachDatabase
+ class << self
+ def each_database_connection
+ Gitlab::Database.database_base_models.each_pair do |connection_name, model|
+ connection = model.connection
+
+ with_shared_connection(connection, connection_name) do
+ yield connection, connection_name
+ end
+ end
+ end
+
+ def each_model_connection(models)
+ models.each do |model|
+ connection_name = model.connection.pool.db_config.name
+
+ with_shared_connection(model.connection, connection_name) do
+ yield model, connection_name
+ end
+ end
+ end
+
+ private
+
+ def with_shared_connection(connection, connection_name)
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ Gitlab::AppLogger.debug(message: 'Switched database connection', connection_name: connection_name)
+
+ yield
+ end
+ end
+ end
+ end
+ end
+end
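
The new EachDatabase helper iterates over every configured base model (main and, when present, ci), wrapping each block in Gitlab::Database::SharedModel.using_connection so that shared models read from the matching connection. A brief usage sketch, assuming a loaded GitLab environment; the model list in the second call is illustrative:

    # Run a block once per configured database connection.
    Gitlab::Database::EachDatabase.each_database_connection do |connection, name|
      puts "#{name}: #{connection.select_value('SELECT current_database()')}"
    end

    # Or iterate over specific models, each switched onto its own connection.
    Gitlab::Database::EachDatabase.each_model_connection([Project, Ci::Build]) do |model, name|
      puts "#{model.name} runs on the #{name} connection"
    end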
diff --git a/lib/gitlab/database/gitlab_schema.rb b/lib/gitlab/database/gitlab_schema.rb
new file mode 100644
index 00000000000..14807494a79
--- /dev/null
+++ b/lib/gitlab/database/gitlab_schema.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+# This module gathers information about table to schema mapping
+# to understand table affinity
+#
+# Each table / view needs to have a gitlab_schema assigned. Names supported today:
+#
+# - gitlab_shared - defines a set of tables that are found on all databases (data accessed is dependent on connection)
+# - gitlab_main / gitlab_ci - defines a set of tables that can only exist on a given database
+#
+# Tables used only for tests should be named with a `_test_` prefix, for example `_test_my_table_name`
+
+module Gitlab
+ module Database
+ module GitlabSchema
+ # These tables are deleted/renamed, but still referenced by migrations.
+ # This is needed for now, but should be removed in the future
+ DELETED_TABLES = {
+ # main tables
+ 'alerts_service_data' => :gitlab_main,
+ 'analytics_devops_adoption_segment_selections' => :gitlab_main,
+ 'analytics_repository_file_commits' => :gitlab_main,
+ 'analytics_repository_file_edits' => :gitlab_main,
+ 'analytics_repository_files' => :gitlab_main,
+ 'audit_events_archived' => :gitlab_main,
+ 'backup_labels' => :gitlab_main,
+ 'clusters_applications_fluentd' => :gitlab_main,
+ 'forked_project_links' => :gitlab_main,
+ 'issue_milestones' => :gitlab_main,
+ 'merge_request_milestones' => :gitlab_main,
+ 'namespace_onboarding_actions' => :gitlab_main,
+ 'services' => :gitlab_main,
+ 'terraform_state_registry' => :gitlab_main,
+ 'tmp_fingerprint_sha256_migration' => :gitlab_main, # used by lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys.rb
+ 'web_hook_logs_archived' => :gitlab_main,
+ 'vulnerability_export_registry' => :gitlab_main,
+ 'vulnerability_finding_fingerprints' => :gitlab_main,
+ 'vulnerability_export_verification_status' => :gitlab_main,
+
+ # CI tables
+ 'ci_build_trace_sections' => :gitlab_ci,
+ 'ci_build_trace_section_names' => :gitlab_ci,
+ 'ci_daily_report_results' => :gitlab_ci,
+ 'ci_test_cases' => :gitlab_ci,
+ 'ci_test_case_failures' => :gitlab_ci,
+
+ # leftovers from early implementation of partitioning
+ 'audit_events_part_5fc467ac26' => :gitlab_main,
+ 'web_hook_logs_part_0c5294f417' => :gitlab_main
+ }.freeze
+
+ def self.table_schemas(tables)
+ tables.map { |table| table_schema(table) }.to_set
+ end
+
+ def self.table_schema(name)
+ schema_name, table_name = name.split('.', 2) # Strip schema name like: `public.`
+
+ # Most names do not include a schema, so ensure this is a table name
+ unless table_name
+ table_name = schema_name
+ schema_name = nil
+ end
+
+ # Strip the partition number from names of the form `loose_foreign_keys_deleted_records_1`
+ table_name.gsub!(/_[0-9]+$/, '')
+
+ # Tables that are properly mapped
+ if gitlab_schema = tables_to_schema[table_name]
+ return gitlab_schema
+ end
+
+ # Tables that are deleted, but we still need to reference them
+ if gitlab_schema = DELETED_TABLES[table_name]
+ return gitlab_schema
+ end
+
+ # All tables from `information_schema.` are `:gitlab_shared`
+ return :gitlab_shared if schema_name == 'information_schema'
+
+ # All tables that start with `_test_` are shared and ignored
+ return :gitlab_shared if table_name.start_with?('_test_')
+
+ # All `pg_` tables are marked as `shared`
+ return :gitlab_shared if table_name.start_with?('pg_')
+
+ # When undefined, it's best to return a unique name so that we don't incorrectly assume that two undefined schemas belong to the same database
+ :"undefined_#{table_name}"
+ end
+
+ def self.tables_to_schema
+ @tables_to_schema ||= YAML.load_file(Rails.root.join('lib/gitlab/database/gitlab_schemas.yml'))
+ end
+ end
+ end
+end
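
The lookup order implemented above is: the YAML mapping, then DELETED_TABLES, then the information_schema / `_test_` / `pg_` shared rules, and finally a unique :undefined_* fallback. A few illustrative lookups; the return values assume the gitlab_schemas.yml added in this commit:

    Gitlab::Database::GitlabSchema.table_schema('projects')                   # => :gitlab_main
    Gitlab::Database::GitlabSchema.table_schema('public.ci_builds')           # => :gitlab_ci (schema prefix stripped)
    Gitlab::Database::GitlabSchema.table_schema('loose_foreign_keys_deleted_records_1')
    # => :gitlab_shared (partition suffix stripped)
    Gitlab::Database::GitlabSchema.table_schema('information_schema.columns') # => :gitlab_shared
    Gitlab::Database::GitlabSchema.table_schema('_test_my_table_name')        # => :gitlab_shared
    Gitlab::Database::GitlabSchema.table_schema('no_such_table')              # => :undefined_no_such_table

    Gitlab::Database::GitlabSchema.table_schemas(%w[projects ci_builds])      # => #<Set: {:gitlab_main, :gitlab_ci}>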
diff --git a/lib/gitlab/database/gitlab_schemas.yml b/lib/gitlab/database/gitlab_schemas.yml
new file mode 100644
index 00000000000..66157e998a0
--- /dev/null
+++ b/lib/gitlab/database/gitlab_schemas.yml
@@ -0,0 +1,543 @@
+abuse_reports: :gitlab_main
+agent_group_authorizations: :gitlab_main
+agent_project_authorizations: :gitlab_main
+alert_management_alert_assignees: :gitlab_main
+alert_management_alerts: :gitlab_main
+alert_management_alert_user_mentions: :gitlab_main
+alert_management_http_integrations: :gitlab_main
+allowed_email_domains: :gitlab_main
+analytics_cycle_analytics_group_stages: :gitlab_main
+analytics_cycle_analytics_group_value_streams: :gitlab_main
+analytics_cycle_analytics_issue_stage_events: :gitlab_main
+analytics_cycle_analytics_merge_request_stage_events: :gitlab_main
+analytics_cycle_analytics_project_stages: :gitlab_main
+analytics_cycle_analytics_project_value_streams: :gitlab_main
+analytics_cycle_analytics_stage_event_hashes: :gitlab_main
+analytics_devops_adoption_segments: :gitlab_main
+analytics_devops_adoption_snapshots: :gitlab_main
+analytics_language_trend_repository_languages: :gitlab_main
+analytics_usage_trends_measurements: :gitlab_main
+appearances: :gitlab_main
+application_settings: :gitlab_main
+application_setting_terms: :gitlab_main
+approval_merge_request_rules_approved_approvers: :gitlab_main
+approval_merge_request_rules: :gitlab_main
+approval_merge_request_rules_groups: :gitlab_main
+approval_merge_request_rule_sources: :gitlab_main
+approval_merge_request_rules_users: :gitlab_main
+approval_project_rules: :gitlab_main
+approval_project_rules_groups: :gitlab_main
+approval_project_rules_protected_branches: :gitlab_main
+approval_project_rules_users: :gitlab_main
+approvals: :gitlab_main
+approver_groups: :gitlab_main
+approvers: :gitlab_main
+ar_internal_metadata: :gitlab_shared
+atlassian_identities: :gitlab_main
+audit_events_external_audit_event_destinations: :gitlab_main
+audit_events: :gitlab_main
+authentication_events: :gitlab_main
+award_emoji: :gitlab_main
+aws_roles: :gitlab_main
+background_migration_jobs: :gitlab_main
+badges: :gitlab_main
+banned_users: :gitlab_main
+batched_background_migration_jobs: :gitlab_main
+batched_background_migrations: :gitlab_main
+board_assignees: :gitlab_main
+board_group_recent_visits: :gitlab_main
+board_labels: :gitlab_main
+board_project_recent_visits: :gitlab_main
+boards_epic_board_labels: :gitlab_main
+boards_epic_board_positions: :gitlab_main
+boards_epic_board_recent_visits: :gitlab_main
+boards_epic_boards: :gitlab_main
+boards_epic_lists: :gitlab_main
+boards_epic_list_user_preferences: :gitlab_main
+boards_epic_user_preferences: :gitlab_main
+boards: :gitlab_main
+board_user_preferences: :gitlab_main
+broadcast_messages: :gitlab_main
+bulk_import_configurations: :gitlab_main
+bulk_import_entities: :gitlab_main
+bulk_import_exports: :gitlab_main
+bulk_import_export_uploads: :gitlab_main
+bulk_import_failures: :gitlab_main
+bulk_imports: :gitlab_main
+bulk_import_trackers: :gitlab_main
+chat_names: :gitlab_main
+chat_teams: :gitlab_main
+ci_build_needs: :gitlab_ci
+ci_build_pending_states: :gitlab_ci
+ci_build_report_results: :gitlab_ci
+ci_builds: :gitlab_ci
+ci_builds_metadata: :gitlab_ci
+ci_builds_runner_session: :gitlab_ci
+ci_build_trace_chunks: :gitlab_ci
+ci_build_trace_metadata: :gitlab_ci
+ci_daily_build_group_report_results: :gitlab_ci
+ci_deleted_objects: :gitlab_ci
+ci_freeze_periods: :gitlab_ci
+ci_group_variables: :gitlab_ci
+ci_instance_variables: :gitlab_ci
+ci_job_artifacts: :gitlab_ci
+ci_job_token_project_scope_links: :gitlab_ci
+ci_job_variables: :gitlab_ci
+ci_minutes_additional_packs: :gitlab_ci
+ci_namespace_monthly_usages: :gitlab_ci
+ci_pending_builds: :gitlab_ci
+ci_pipeline_artifacts: :gitlab_ci
+ci_pipeline_chat_data: :gitlab_ci
+ci_pipeline_messages: :gitlab_ci
+ci_pipeline_schedules: :gitlab_ci
+ci_pipeline_schedule_variables: :gitlab_ci
+ci_pipelines_config: :gitlab_ci
+ci_pipelines: :gitlab_ci
+ci_pipeline_variables: :gitlab_ci
+ci_platform_metrics: :gitlab_ci
+ci_project_monthly_usages: :gitlab_ci
+ci_refs: :gitlab_ci
+ci_resource_groups: :gitlab_ci
+ci_resources: :gitlab_ci
+ci_runner_namespaces: :gitlab_ci
+ci_runner_projects: :gitlab_ci
+ci_runners: :gitlab_ci
+ci_running_builds: :gitlab_ci
+ci_sources_pipelines: :gitlab_ci
+ci_sources_projects: :gitlab_ci
+ci_stages: :gitlab_ci
+ci_subscriptions_projects: :gitlab_ci
+ci_trigger_requests: :gitlab_ci
+ci_triggers: :gitlab_ci
+ci_unit_test_failures: :gitlab_ci
+ci_unit_tests: :gitlab_ci
+ci_variables: :gitlab_ci
+cluster_agents: :gitlab_main
+cluster_agent_tokens: :gitlab_main
+cluster_groups: :gitlab_main
+cluster_platforms_kubernetes: :gitlab_main
+cluster_projects: :gitlab_main
+cluster_providers_aws: :gitlab_main
+cluster_providers_gcp: :gitlab_main
+clusters_applications_cert_managers: :gitlab_main
+clusters_applications_cilium: :gitlab_main
+clusters_applications_crossplane: :gitlab_main
+clusters_applications_elastic_stacks: :gitlab_main
+clusters_applications_helm: :gitlab_main
+clusters_applications_ingress: :gitlab_main
+clusters_applications_jupyter: :gitlab_main
+clusters_applications_knative: :gitlab_main
+clusters_applications_prometheus: :gitlab_main
+clusters_applications_runners: :gitlab_main
+clusters: :gitlab_main
+clusters_integration_elasticstack: :gitlab_main
+clusters_integration_prometheus: :gitlab_main
+clusters_kubernetes_namespaces: :gitlab_main
+commit_user_mentions: :gitlab_main
+compliance_management_frameworks: :gitlab_main
+container_expiration_policies: :gitlab_main
+container_repositories: :gitlab_main
+content_blocked_states: :gitlab_main
+conversational_development_index_metrics: :gitlab_main
+coverage_fuzzing_corpuses: :gitlab_main
+csv_issue_imports: :gitlab_main
+custom_emoji: :gitlab_main
+customer_relations_contacts: :gitlab_main
+customer_relations_organizations: :gitlab_main
+dast_profile_schedules: :gitlab_main
+dast_profiles: :gitlab_main
+dast_profiles_pipelines: :gitlab_main
+dast_scanner_profiles_builds: :gitlab_main
+dast_scanner_profiles: :gitlab_main
+dast_site_profiles_builds: :gitlab_main
+dast_site_profile_secret_variables: :gitlab_main
+dast_site_profiles: :gitlab_main
+dast_site_profiles_pipelines: :gitlab_main
+dast_sites: :gitlab_main
+dast_site_tokens: :gitlab_main
+dast_site_validations: :gitlab_main
+dependency_proxy_blobs: :gitlab_main
+dependency_proxy_group_settings: :gitlab_main
+dependency_proxy_image_ttl_group_policies: :gitlab_main
+dependency_proxy_manifests: :gitlab_main
+deploy_keys_projects: :gitlab_main
+deployment_clusters: :gitlab_main
+deployment_merge_requests: :gitlab_main
+deployments: :gitlab_main
+deploy_tokens: :gitlab_main
+description_versions: :gitlab_main
+design_management_designs: :gitlab_main
+design_management_designs_versions: :gitlab_main
+design_management_versions: :gitlab_main
+design_user_mentions: :gitlab_main
+detached_partitions: :gitlab_shared
+diff_note_positions: :gitlab_main
+dora_daily_metrics: :gitlab_main
+draft_notes: :gitlab_main
+elastic_index_settings: :gitlab_main
+elastic_reindexing_slices: :gitlab_main
+elastic_reindexing_subtasks: :gitlab_main
+elastic_reindexing_tasks: :gitlab_main
+elasticsearch_indexed_namespaces: :gitlab_main
+elasticsearch_indexed_projects: :gitlab_main
+emails: :gitlab_main
+environments: :gitlab_main
+epic_issues: :gitlab_main
+epic_metrics: :gitlab_main
+epics: :gitlab_main
+epic_user_mentions: :gitlab_main
+error_tracking_client_keys: :gitlab_main
+error_tracking_error_events: :gitlab_main
+error_tracking_errors: :gitlab_main
+events: :gitlab_main
+evidences: :gitlab_main
+experiments: :gitlab_main
+experiment_subjects: :gitlab_main
+experiment_users: :gitlab_main
+external_approval_rules: :gitlab_main
+external_approval_rules_protected_branches: :gitlab_main
+external_pull_requests: :gitlab_main
+external_status_checks: :gitlab_main
+external_status_checks_protected_branches: :gitlab_main
+feature_gates: :gitlab_main
+features: :gitlab_main
+fork_network_members: :gitlab_main
+fork_networks: :gitlab_main
+geo_cache_invalidation_events: :gitlab_main
+geo_container_repository_updated_events: :gitlab_main
+geo_event_log: :gitlab_main
+geo_events: :gitlab_main
+geo_hashed_storage_attachments_events: :gitlab_main
+geo_hashed_storage_migrated_events: :gitlab_main
+geo_job_artifact_deleted_events: :gitlab_main
+geo_lfs_object_deleted_events: :gitlab_main
+geo_node_namespace_links: :gitlab_main
+geo_nodes: :gitlab_main
+geo_node_statuses: :gitlab_main
+geo_repositories_changed_events: :gitlab_main
+geo_repository_created_events: :gitlab_main
+geo_repository_deleted_events: :gitlab_main
+geo_repository_renamed_events: :gitlab_main
+geo_repository_updated_events: :gitlab_main
+geo_reset_checksum_events: :gitlab_main
+gitlab_subscription_histories: :gitlab_main
+gitlab_subscriptions: :gitlab_main
+gpg_keys: :gitlab_main
+gpg_key_subkeys: :gitlab_main
+gpg_signatures: :gitlab_main
+grafana_integrations: :gitlab_main
+group_custom_attributes: :gitlab_main
+group_deletion_schedules: :gitlab_main
+group_deploy_keys: :gitlab_main
+group_deploy_keys_groups: :gitlab_main
+group_deploy_tokens: :gitlab_main
+group_group_links: :gitlab_main
+group_import_states: :gitlab_main
+group_merge_request_approval_settings: :gitlab_main
+group_repository_storage_moves: :gitlab_main
+group_wiki_repositories: :gitlab_main
+historical_data: :gitlab_main
+identities: :gitlab_main
+import_export_uploads: :gitlab_main
+import_failures: :gitlab_main
+incident_management_escalation_policies: :gitlab_main
+incident_management_escalation_rules: :gitlab_main
+incident_management_issuable_escalation_statuses: :gitlab_main
+incident_management_oncall_participants: :gitlab_main
+incident_management_oncall_rotations: :gitlab_main
+incident_management_oncall_schedules: :gitlab_main
+incident_management_oncall_shifts: :gitlab_main
+incident_management_pending_alert_escalations: :gitlab_main
+incident_management_pending_issue_escalations: :gitlab_main
+index_statuses: :gitlab_main
+in_product_marketing_emails: :gitlab_main
+insights: :gitlab_main
+integrations: :gitlab_main
+internal_ids: :gitlab_main
+ip_restrictions: :gitlab_main
+issuable_metric_images: :gitlab_main
+issuable_severities: :gitlab_main
+issuable_slas: :gitlab_main
+issue_assignees: :gitlab_main
+issue_customer_relations_contacts: :gitlab_main
+issue_email_participants: :gitlab_main
+issue_links: :gitlab_main
+issue_metrics: :gitlab_main
+issues: :gitlab_main
+issues_prometheus_alert_events: :gitlab_main
+issues_self_managed_prometheus_alert_events: :gitlab_main
+issue_tracker_data: :gitlab_main
+issue_user_mentions: :gitlab_main
+iterations_cadences: :gitlab_main
+jira_connect_installations: :gitlab_main
+jira_connect_subscriptions: :gitlab_main
+jira_imports: :gitlab_main
+jira_tracker_data: :gitlab_main
+keys: :gitlab_main
+label_links: :gitlab_main
+label_priorities: :gitlab_main
+labels: :gitlab_main
+ldap_group_links: :gitlab_main
+lfs_file_locks: :gitlab_main
+lfs_objects: :gitlab_main
+lfs_objects_projects: :gitlab_main
+licenses: :gitlab_main
+lists: :gitlab_main
+list_user_preferences: :gitlab_main
+loose_foreign_keys_deleted_records: :gitlab_shared
+member_tasks: :gitlab_main
+members: :gitlab_main
+merge_request_assignees: :gitlab_main
+merge_request_blocks: :gitlab_main
+merge_request_cleanup_schedules: :gitlab_main
+merge_request_context_commit_diff_files: :gitlab_main
+merge_request_context_commits: :gitlab_main
+merge_request_diff_commits: :gitlab_main
+merge_request_diff_commit_users: :gitlab_main
+merge_request_diff_details: :gitlab_main
+merge_request_diff_files: :gitlab_main
+merge_request_diffs: :gitlab_main
+merge_request_metrics: :gitlab_main
+merge_request_reviewers: :gitlab_main
+merge_requests_closing_issues: :gitlab_main
+merge_requests: :gitlab_main
+merge_request_user_mentions: :gitlab_main
+merge_trains: :gitlab_main
+metrics_dashboard_annotations: :gitlab_main
+metrics_users_starred_dashboards: :gitlab_main
+milestone_releases: :gitlab_main
+milestones: :gitlab_main
+namespace_admin_notes: :gitlab_main
+namespace_aggregation_schedules: :gitlab_main
+namespace_limits: :gitlab_main
+namespace_package_settings: :gitlab_main
+namespace_root_storage_statistics: :gitlab_main
+namespace_settings: :gitlab_main
+namespaces: :gitlab_main
+namespace_statistics: :gitlab_main
+note_diff_files: :gitlab_main
+notes: :gitlab_main
+notification_settings: :gitlab_main
+oauth_access_grants: :gitlab_main
+oauth_access_tokens: :gitlab_main
+oauth_applications: :gitlab_main
+oauth_openid_requests: :gitlab_main
+onboarding_progresses: :gitlab_main
+operations_feature_flags_clients: :gitlab_main
+operations_feature_flag_scopes: :gitlab_main
+operations_feature_flags: :gitlab_main
+operations_feature_flags_issues: :gitlab_main
+operations_scopes: :gitlab_main
+operations_strategies: :gitlab_main
+operations_strategies_user_lists: :gitlab_main
+operations_user_lists: :gitlab_main
+packages_build_infos: :gitlab_main
+packages_composer_cache_files: :gitlab_main
+packages_composer_metadata: :gitlab_main
+packages_conan_file_metadata: :gitlab_main
+packages_conan_metadata: :gitlab_main
+packages_debian_file_metadata: :gitlab_main
+packages_debian_group_architectures: :gitlab_main
+packages_debian_group_component_files: :gitlab_main
+packages_debian_group_components: :gitlab_main
+packages_debian_group_distribution_keys: :gitlab_main
+packages_debian_group_distributions: :gitlab_main
+packages_debian_project_architectures: :gitlab_main
+packages_debian_project_component_files: :gitlab_main
+packages_debian_project_components: :gitlab_main
+packages_debian_project_distribution_keys: :gitlab_main
+packages_debian_project_distributions: :gitlab_main
+packages_debian_publications: :gitlab_main
+packages_dependencies: :gitlab_main
+packages_dependency_links: :gitlab_main
+packages_events: :gitlab_main
+packages_helm_file_metadata: :gitlab_main
+packages_maven_metadata: :gitlab_main
+packages_npm_metadata: :gitlab_main
+packages_nuget_dependency_link_metadata: :gitlab_main
+packages_nuget_metadata: :gitlab_main
+packages_package_file_build_infos: :gitlab_main
+packages_package_files: :gitlab_main
+packages_packages: :gitlab_main
+packages_pypi_metadata: :gitlab_main
+packages_rubygems_metadata: :gitlab_main
+packages_tags: :gitlab_main
+pages_deployments: :gitlab_main
+pages_domain_acme_orders: :gitlab_main
+pages_domains: :gitlab_main
+partitioned_foreign_keys: :gitlab_main
+path_locks: :gitlab_main
+personal_access_tokens: :gitlab_main
+plan_limits: :gitlab_main
+plans: :gitlab_main
+pool_repositories: :gitlab_main
+postgres_async_indexes: :gitlab_shared
+postgres_foreign_keys: :gitlab_shared
+postgres_index_bloat_estimates: :gitlab_shared
+postgres_indexes: :gitlab_shared
+postgres_partitioned_tables: :gitlab_shared
+postgres_partitions: :gitlab_shared
+postgres_reindex_actions: :gitlab_shared
+postgres_reindex_queued_actions: :gitlab_main
+product_analytics_events_experimental: :gitlab_main
+programming_languages: :gitlab_main
+project_access_tokens: :gitlab_main
+project_alerting_settings: :gitlab_main
+project_aliases: :gitlab_main
+project_authorizations: :gitlab_main
+project_auto_devops: :gitlab_main
+project_ci_cd_settings: :gitlab_main
+project_ci_feature_usages: :gitlab_main
+project_compliance_framework_settings: :gitlab_main
+project_custom_attributes: :gitlab_main
+project_daily_statistics: :gitlab_main
+project_deploy_tokens: :gitlab_main
+project_error_tracking_settings: :gitlab_main
+project_export_jobs: :gitlab_main
+project_features: :gitlab_main
+project_feature_usages: :gitlab_main
+project_group_links: :gitlab_main
+project_import_data: :gitlab_main
+project_incident_management_settings: :gitlab_main
+project_metrics_settings: :gitlab_main
+project_mirror_data: :gitlab_main
+project_pages_metadata: :gitlab_main
+project_repositories: :gitlab_main
+project_repository_states: :gitlab_main
+project_repository_storage_moves: :gitlab_main
+project_security_settings: :gitlab_main
+project_settings: :gitlab_main
+projects: :gitlab_main
+project_statistics: :gitlab_main
+project_topics: :gitlab_main
+project_tracing_settings: :gitlab_main
+prometheus_alert_events: :gitlab_main
+prometheus_alerts: :gitlab_main
+prometheus_metrics: :gitlab_main
+protected_branches: :gitlab_main
+protected_branch_merge_access_levels: :gitlab_main
+protected_branch_push_access_levels: :gitlab_main
+protected_branch_unprotect_access_levels: :gitlab_main
+protected_environment_deploy_access_levels: :gitlab_main
+protected_environments: :gitlab_main
+protected_tag_create_access_levels: :gitlab_main
+protected_tags: :gitlab_main
+push_event_payloads: :gitlab_main
+push_rules: :gitlab_main
+raw_usage_data: :gitlab_main
+redirect_routes: :gitlab_main
+release_links: :gitlab_main
+releases: :gitlab_main
+remote_mirrors: :gitlab_main
+repository_languages: :gitlab_main
+required_code_owners_sections: :gitlab_main
+requirements: :gitlab_main
+requirements_management_test_reports: :gitlab_main
+resource_iteration_events: :gitlab_main
+resource_label_events: :gitlab_main
+resource_milestone_events: :gitlab_main
+resource_state_events: :gitlab_main
+resource_weight_events: :gitlab_main
+reviews: :gitlab_main
+routes: :gitlab_main
+saml_group_links: :gitlab_main
+saml_providers: :gitlab_main
+schema_migrations: :gitlab_shared
+scim_identities: :gitlab_main
+scim_oauth_access_tokens: :gitlab_main
+security_findings: :gitlab_main
+security_orchestration_policy_configurations: :gitlab_main
+security_orchestration_policy_rule_schedules: :gitlab_main
+security_scans: :gitlab_main
+self_managed_prometheus_alert_events: :gitlab_main
+sent_notifications: :gitlab_main
+sentry_issues: :gitlab_main
+serverless_domain_cluster: :gitlab_main
+service_desk_settings: :gitlab_main
+shards: :gitlab_main
+slack_integrations: :gitlab_main
+smartcard_identities: :gitlab_main
+snippet_repositories: :gitlab_main
+snippet_repository_storage_moves: :gitlab_main
+snippets: :gitlab_main
+snippet_statistics: :gitlab_main
+snippet_user_mentions: :gitlab_main
+software_license_policies: :gitlab_main
+software_licenses: :gitlab_main
+spam_logs: :gitlab_main
+sprints: :gitlab_main
+status_check_responses: :gitlab_main
+status_page_published_incidents: :gitlab_main
+status_page_settings: :gitlab_main
+subscriptions: :gitlab_main
+suggestions: :gitlab_main
+system_note_metadata: :gitlab_main
+taggings: :gitlab_ci
+tags: :gitlab_ci
+term_agreements: :gitlab_main
+terraform_states: :gitlab_main
+terraform_state_versions: :gitlab_main
+timelogs: :gitlab_main
+todos: :gitlab_main
+token_with_ivs: :gitlab_main
+topics: :gitlab_main
+trending_projects: :gitlab_main
+u2f_registrations: :gitlab_main
+upcoming_reconciliations: :gitlab_main
+uploads: :gitlab_main
+user_agent_details: :gitlab_main
+user_callouts: :gitlab_main
+user_canonical_emails: :gitlab_main
+user_credit_card_validations: :gitlab_main
+user_custom_attributes: :gitlab_main
+user_details: :gitlab_main
+user_follow_users: :gitlab_main
+user_group_callouts: :gitlab_main
+user_highest_roles: :gitlab_main
+user_interacted_projects: :gitlab_main
+user_permission_export_uploads: :gitlab_main
+user_preferences: :gitlab_main
+users: :gitlab_main
+users_ops_dashboard_projects: :gitlab_main
+users_security_dashboard_projects: :gitlab_main
+users_star_projects: :gitlab_main
+users_statistics: :gitlab_main
+user_statuses: :gitlab_main
+user_synced_attributes_metadata: :gitlab_main
+verification_codes: :gitlab_main
+vulnerabilities: :gitlab_main
+vulnerability_exports: :gitlab_main
+vulnerability_external_issue_links: :gitlab_main
+vulnerability_feedback: :gitlab_main
+vulnerability_finding_evidence_assets: :gitlab_main
+vulnerability_finding_evidence_headers: :gitlab_main
+vulnerability_finding_evidence_requests: :gitlab_main
+vulnerability_finding_evidence_responses: :gitlab_main
+vulnerability_finding_evidences: :gitlab_main
+vulnerability_finding_evidence_sources: :gitlab_main
+vulnerability_finding_evidence_supporting_messages: :gitlab_main
+vulnerability_finding_links: :gitlab_main
+vulnerability_finding_signatures: :gitlab_main
+vulnerability_findings_remediations: :gitlab_main
+vulnerability_flags: :gitlab_main
+vulnerability_historical_statistics: :gitlab_main
+vulnerability_identifiers: :gitlab_main
+vulnerability_issue_links: :gitlab_main
+vulnerability_occurrence_identifiers: :gitlab_main
+vulnerability_occurrence_pipelines: :gitlab_main
+vulnerability_occurrences: :gitlab_main
+vulnerability_remediations: :gitlab_main
+vulnerability_scanners: :gitlab_main
+vulnerability_statistics: :gitlab_main
+vulnerability_user_mentions: :gitlab_main
+webauthn_registrations: :gitlab_main
+web_hook_logs: :gitlab_main
+web_hooks: :gitlab_main
+wiki_page_meta: :gitlab_main
+wiki_page_slugs: :gitlab_main
+work_item_types: :gitlab_main
+x509_certificates: :gitlab_main
+x509_commit_signatures: :gitlab_main
+x509_issuers: :gitlab_main
+zentao_tracker_data: :gitlab_main
+zoom_meetings: :gitlab_main
diff --git a/lib/gitlab/database/load_balancing.rb b/lib/gitlab/database/load_balancing.rb
index 3e322e752b7..52eb0764ae3 100644
--- a/lib/gitlab/database/load_balancing.rb
+++ b/lib/gitlab/database/load_balancing.rb
@@ -26,7 +26,7 @@ module Gitlab
return to_enum(__method__) unless block_given?
base_models.each do |model|
- yield model.connection.load_balancer
+ yield model.load_balancer
end
end
diff --git a/lib/gitlab/database/load_balancing/configuration.rb b/lib/gitlab/database/load_balancing/configuration.rb
index 6156515bd73..da313361073 100644
--- a/lib/gitlab/database/load_balancing/configuration.rb
+++ b/lib/gitlab/database/load_balancing/configuration.rb
@@ -7,7 +7,7 @@ module Gitlab
class Configuration
attr_accessor :hosts, :max_replication_difference,
:max_replication_lag_time, :replica_check_interval,
- :service_discovery, :model
+ :service_discovery
# Creates a configuration object for the given ActiveRecord model.
def self.for_model(model)
@@ -41,6 +41,8 @@ module Gitlab
end
end
+ config.reuse_primary_connection!
+
config
end
@@ -59,6 +61,28 @@ module Gitlab
disconnect_timeout: 120,
use_tcp: false
}
+
+ # Temporary model for GITLAB_LOAD_BALANCING_REUSE_PRIMARY_
+ # To be removed with FF
+ @primary_model = nil
+ end
+
+ def db_config_name
+ @model.connection_db_config.name.to_sym
+ end
+
+ # With connection re-use the primary connection can be overwritten
+ # to be used from different model
+ def primary_connection_specification_name
+ (@primary_model || @model).connection_specification_name
+ end
+
+ def primary_db_config
+ (@primary_model || @model).connection_db_config
+ end
+
+ def replica_db_config
+ @model.connection_db_config
end
def pool_size
@@ -86,6 +110,30 @@ module Gitlab
def service_discovery_enabled?
service_discovery[:record].present?
end
+
+ # TODO: This is temporary code to allow re-use of primary connection
+ # if the two connections are pointing to the same host. This is needed
+ # to properly support transaction visibility.
+ #
+ # This behavior is required to support [Phase 3](https://gitlab.com/groups/gitlab-org/-/epics/6160#progress).
+ # This method is meant to be removed as soon as it is finished.
+ #
+ # The remapping is configured as follows:
+ # export GITLAB_LOAD_BALANCING_REUSE_PRIMARY_<name-of-connection>=<new-name-of-connection>
+ #
+ # Ex.:
+ # export GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main
+ #
+ def reuse_primary_connection!
+ new_connection = ENV["GITLAB_LOAD_BALANCING_REUSE_PRIMARY_#{db_config_name}"]
+ return unless new_connection.present?
+
+ @primary_model = Gitlab::Database.database_base_models[new_connection.to_sym]
+
+ unless @primary_model
+ raise "Invalid value for 'GITLAB_LOAD_BALANCING_REUSE_PRIMARY_#{db_config_name}=#{new_connection}'"
+ end
+ end
end
end
end
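
With the environment variable set, the configuration keeps resolving replicas from its own model but borrows the primary connection from the named base model, and raises when the target connection name is unknown. A sketch of the resulting behaviour, assuming both the main and ci connections are configured:

    ENV['GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci'] = 'main'

    config = Gitlab::Database::LoadBalancing::Configuration.for_model(Ci::ApplicationRecord)

    config.db_config_name     # => :ci
    config.replica_db_config  # still the ci database config, used for replica pools
    config.primary_db_config  # now taken from the main base model, so the primary connection is shared

    ENV['GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci'] = 'unknown'
    Gitlab::Database::LoadBalancing::Configuration.for_model(Ci::ApplicationRecord)
    # => RuntimeError: Invalid value for 'GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=unknown'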
diff --git a/lib/gitlab/database/load_balancing/connection_proxy.rb b/lib/gitlab/database/load_balancing/connection_proxy.rb
index 1be63da8896..a91df2eccdd 100644
--- a/lib/gitlab/database/load_balancing/connection_proxy.rb
+++ b/lib/gitlab/database/load_balancing/connection_proxy.rb
@@ -13,6 +13,13 @@ module Gitlab
WriteInsideReadOnlyTransactionError = Class.new(StandardError)
READ_ONLY_TRANSACTION_KEY = :load_balacing_read_only_transaction
+ # The load balancer returned by `model.connection.load_balancer` might differ
+ # from the one returned by `model.load_balancer`.
+ #
+ # The used `model.connection` is dependent on `use_model_load_balancing`.
+ # See more in: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73949.
+ #
+ # Always use `model.load_balancer` or `model.sticking`.
attr_reader :load_balancer
# These methods perform writes after which we need to stick to the
diff --git a/lib/gitlab/database/load_balancing/load_balancer.rb b/lib/gitlab/database/load_balancing/load_balancer.rb
index 2be7f0baa60..1e27bcfc55d 100644
--- a/lib/gitlab/database/load_balancing/load_balancer.rb
+++ b/lib/gitlab/database/load_balancing/load_balancer.rb
@@ -12,7 +12,7 @@ module Gitlab
REPLICA_SUFFIX = '_replica'
- attr_reader :name, :host_list, :configuration
+ attr_reader :host_list, :configuration
# configuration - An instance of `LoadBalancing::Configuration` that
# contains the configuration details (such as the hosts)
@@ -26,8 +26,10 @@ module Gitlab
else
HostList.new(configuration.hosts.map { |addr| Host.new(addr, self) })
end
+ end
- @name = @configuration.model.connection_db_config.name.to_sym
+ def name
+ @configuration.db_config_name
end
def primary_only?
@@ -64,7 +66,7 @@ module Gitlab
# times before using the primary instead.
will_retry = conflict_retried < @host_list.length * 3
- LoadBalancing::Logger.warn(
+ ::Gitlab::Database::LoadBalancing::Logger.warn(
event: :host_query_conflict,
message: 'Query conflict on host',
conflict_retried: conflict_retried,
@@ -89,7 +91,7 @@ module Gitlab
end
end
- LoadBalancing::Logger.warn(
+ ::Gitlab::Database::LoadBalancing::Logger.warn(
event: :no_secondaries_available,
message: 'No secondaries were available, using primary instead',
conflict_retried: conflict_retried,
@@ -136,7 +138,7 @@ module Gitlab
# Returns the transaction write location of the primary.
def primary_write_location
location = read_write do |connection|
- ::Gitlab::Database.main.get_write_location(connection)
+ get_write_location(connection)
end
return location if location
@@ -230,7 +232,7 @@ module Gitlab
# host - An optional host name to use instead of the default one.
# port - An optional port to connect to.
def create_replica_connection_pool(pool_size, host = nil, port = nil)
- db_config = pool.db_config
+ db_config = @configuration.replica_db_config
env_config = db_config.configuration_hash.dup
env_config[:pool] = pool_size
@@ -255,22 +257,67 @@ module Gitlab
# leverage that.
def pool
ActiveRecord::Base.connection_handler.retrieve_connection_pool(
- @configuration.model.connection_specification_name,
+ @configuration.primary_connection_specification_name,
role: ActiveRecord::Base.writing_role,
shard: ActiveRecord::Base.default_shard
) || raise(::ActiveRecord::ConnectionNotEstablished)
end
+ def wal_diff(location1, location2)
+ read_write do |connection|
+ lsn1 = connection.quote(location1)
+ lsn2 = connection.quote(location2)
+
+ query = <<-SQL.squish
+ SELECT pg_wal_lsn_diff(#{lsn1}, #{lsn2})
+ AS result
+ SQL
+
+ row = connection.select_all(query).first
+ row['result'] if row
+ end
+ end
+
private
def ensure_caching!
- host.enable_query_cache! unless host.query_cache_enabled
+ return unless Rails.application.executor.active?
+ return if host.query_cache_enabled
+
+ host.enable_query_cache!
end
def request_cache
base = SafeRequestStore[:gitlab_load_balancer] ||= {}
base[self] ||= {}
end
+
+ # @param [ActiveRecord::Connection] ar_connection
+ # @return [String]
+ def get_write_location(ar_connection)
+ use_new_load_balancer_query = Gitlab::Utils
+ .to_boolean(ENV['USE_NEW_LOAD_BALANCER_QUERY'], default: true)
+
+ sql =
+ if use_new_load_balancer_query
+ <<~NEWSQL
+ SELECT CASE
+ WHEN pg_is_in_recovery() = true AND EXISTS (SELECT 1 FROM pg_stat_get_wal_senders())
+ THEN pg_last_wal_replay_lsn()::text
+ WHEN pg_is_in_recovery() = false
+ THEN pg_current_wal_insert_lsn()::text
+ ELSE NULL
+ END AS location;
+ NEWSQL
+ else
+ <<~SQL
+ SELECT pg_current_wal_insert_lsn()::text AS location
+ SQL
+ end
+
+ row = ar_connection.select_all(sql).first
+ row['location'] if row
+ end
end
end
end
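
Combined, primary_write_location and the new wal_diff helper give the replication distance in bytes between two WAL positions. A hedged sketch, assuming load balancing has been set up so that the base model exposes `load_balancer` (the replica LSN below is an illustrative literal):

    lb = ApplicationRecord.load_balancer

    primary_lsn = lb.primary_write_location  # e.g. "0/3000060"
    replica_lsn = '0/3000000'                # normally read from a replica host

    lb.wal_diff(primary_lsn, replica_lsn)    # LSN difference in bytes; positive when the replica lags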
diff --git a/lib/gitlab/database/load_balancing/primary_host.rb b/lib/gitlab/database/load_balancing/primary_host.rb
index 7070cc54d4b..fb52b384ddb 100644
--- a/lib/gitlab/database/load_balancing/primary_host.rb
+++ b/lib/gitlab/database/load_balancing/primary_host.rb
@@ -49,6 +49,11 @@ module Gitlab
end
def offline!
+ ::Gitlab::Database::LoadBalancing::Logger.warn(
+ event: :host_offline,
+ message: 'Marking primary host as offline'
+ )
+
nil
end
diff --git a/lib/gitlab/database/load_balancing/rack_middleware.rb b/lib/gitlab/database/load_balancing/rack_middleware.rb
index 7ce7649cc22..99b1c31b04b 100644
--- a/lib/gitlab/database/load_balancing/rack_middleware.rb
+++ b/lib/gitlab/database/load_balancing/rack_middleware.rb
@@ -38,8 +38,8 @@ module Gitlab
def unstick_or_continue_sticking(env)
namespaces_and_ids = sticking_namespaces(env)
- namespaces_and_ids.each do |(model, namespace, id)|
- model.sticking.unstick_or_continue_sticking(namespace, id)
+ namespaces_and_ids.each do |(sticking, namespace, id)|
+ sticking.unstick_or_continue_sticking(namespace, id)
end
end
@@ -47,8 +47,8 @@ module Gitlab
def stick_if_necessary(env)
namespaces_and_ids = sticking_namespaces(env)
- namespaces_and_ids.each do |model, namespace, id|
- model.sticking.stick_if_necessary(namespace, id)
+ namespaces_and_ids.each do |sticking, namespace, id|
+ sticking.stick_if_necessary(namespace, id)
end
end
@@ -74,7 +74,7 @@ module Gitlab
# models that support load balancing. In the future (if we
# determined this to be OK) we may be able to relax this.
::Gitlab::Database::LoadBalancing.base_models.map do |model|
- [model, :user, warden.user.id]
+ [model.sticking, :user, warden.user.id]
end
elsif env[STICK_OBJECT].present?
env[STICK_OBJECT].to_a
diff --git a/lib/gitlab/database/load_balancing/setup.rb b/lib/gitlab/database/load_balancing/setup.rb
index 3cce839a960..ef38f42f50b 100644
--- a/lib/gitlab/database/load_balancing/setup.rb
+++ b/lib/gitlab/database/load_balancing/setup.rb
@@ -5,7 +5,7 @@ module Gitlab
module LoadBalancing
# Class for setting up load balancing of a specific model.
class Setup
- attr_reader :configuration
+ attr_reader :model, :configuration
def initialize(model, start_service_discovery: false)
@model = model
@@ -14,47 +14,102 @@ module Gitlab
end
def setup
- disable_prepared_statements
- setup_load_balancer
+ configure_connection
+ setup_connection_proxy
setup_service_discovery
+ setup_feature_flag_to_model_load_balancing
end
- def disable_prepared_statements
+ def configure_connection
db_config_object = @model.connection_db_config
- config =
- db_config_object.configuration_hash.merge(prepared_statements: false)
+
+ hash = db_config_object.configuration_hash.merge(
+ prepared_statements: false,
+ pool: Gitlab::Database.default_pool_size
+ )
hash_config = ActiveRecord::DatabaseConfigurations::HashConfig.new(
db_config_object.env_name,
db_config_object.name,
- config
+ hash
)
@model.establish_connection(hash_config)
end
- def setup_load_balancer
- lb = LoadBalancer.new(configuration)
-
+ def setup_connection_proxy
# We just use a simple `class_attribute` here so we don't need to
# inject any modules and/or expose unnecessary methods.
- @model.class_attribute(:connection)
- @model.class_attribute(:sticking)
+ setup_class_attribute(:load_balancer, load_balancer)
+ setup_class_attribute(:connection, ConnectionProxy.new(load_balancer))
+ setup_class_attribute(:sticking, Sticking.new(load_balancer))
+ end
+
+ # TODO: This is temporary code to gradually redirect traffic to use
+ # dedicated DB replicas, or DB primaries (depending on configuration)
+ # This implements a sticky behavior for the current request if enabled.
+ #
+ # This is needed for Phase 3 and Phase 4 of application rollout
+ # https://gitlab.com/groups/gitlab-org/-/epics/6160#progress
+ #
+ # If `GITLAB_USE_MODEL_LOAD_BALANCING` is set, its value is preferred
+ # Otherwise, a `use_model_load_balancing` FF value is used
+ def setup_feature_flag_to_model_load_balancing
+ return if active_record_base?
- @model.connection = ConnectionProxy.new(lb)
- @model.sticking = Sticking.new(lb)
+ @model.singleton_class.prepend(ModelLoadBalancingFeatureFlagMixin)
end
def setup_service_discovery
return unless configuration.service_discovery_enabled?
- lb = @model.connection.load_balancer
- sv = ServiceDiscovery.new(lb, **configuration.service_discovery)
+ sv = ServiceDiscovery.new(load_balancer, **configuration.service_discovery)
sv.perform_service_discovery
sv.start if @start_service_discovery
end
+
+ def load_balancer
+ @load_balancer ||= LoadBalancer.new(configuration)
+ end
+
+ private
+
+ def setup_class_attribute(attribute, value)
+ @model.class_attribute(attribute)
+ @model.public_send("#{attribute}=", value) # rubocop:disable GitlabSecurity/PublicSend
+ end
+
+ def active_record_base?
+ @model == ActiveRecord::Base
+ end
+
+ module ModelLoadBalancingFeatureFlagMixin
+ extend ActiveSupport::Concern
+
+ def use_model_load_balancing?
+ # Cache environment variable and return env variable first if defined
+ use_model_load_balancing_env = Gitlab::Utils.to_boolean(ENV["GITLAB_USE_MODEL_LOAD_BALANCING"])
+
+ unless use_model_load_balancing_env.nil?
+ return use_model_load_balancing_env
+ end
+
+ # Check a feature flag using RequestStore (if active)
+ return false unless Gitlab::SafeRequestStore.active?
+
+ Gitlab::SafeRequestStore.fetch(:use_model_load_balancing) do
+ Feature.enabled?(:use_model_load_balancing, default_enabled: :yaml)
+ end
+ end
+
+ # rubocop:disable Database/MultipleDatabases
+ def connection
+ use_model_load_balancing? ? super : ActiveRecord::Base.connection
+ end
+ # rubocop:enable Database/MultipleDatabases
+ end
end
end
end
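
The mixin prepended above decides per call whether a model uses its own load-balanced connection proxy or falls back to ActiveRecord::Base.connection: GITLAB_USE_MODEL_LOAD_BALANCING wins when set, otherwise the use_model_load_balancing feature flag is consulted, and only while a request store is active. A rough sketch of the switch, using Ci::ApplicationRecord as the non-base model; values are illustrative:

    ENV['GITLAB_USE_MODEL_LOAD_BALANCING'] = 'true'
    Ci::ApplicationRecord.connection  # the model's own ConnectionProxy

    ENV['GITLAB_USE_MODEL_LOAD_BALANCING'] = 'false'
    Ci::ApplicationRecord.connection  # falls back to ActiveRecord::Base.connection

    ENV.delete('GITLAB_USE_MODEL_LOAD_BALANCING')
    # With the variable unset, the answer depends on
    # Feature.enabled?(:use_model_load_balancing, default_enabled: :yaml),
    # cached for the current request via Gitlab::SafeRequestStore.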
diff --git a/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb b/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb
index f0c7016032b..b9acc36b4cc 100644
--- a/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb
+++ b/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb
@@ -13,7 +13,7 @@ module Gitlab
job['load_balancing_strategy'] = strategy.to_s
if use_primary?(strategy)
- Session.current.use_primary!
+ ::Gitlab::Database::LoadBalancing::Session.current.use_primary!
elsif strategy == :retry
raise JobReplicaNotUpToDate, "Sidekiq job #{worker_class} JID-#{job['jid']} couldn't use the replica."\
" Replica was not up to date."
@@ -29,8 +29,8 @@ module Gitlab
private
def clear
- LoadBalancing.release_hosts
- Session.clear_session
+ ::Gitlab::Database::LoadBalancing.release_hosts
+ ::Gitlab::Database::LoadBalancing::Session.clear_session
end
def use_primary?(strategy)
@@ -66,7 +66,7 @@ module Gitlab
def legacy_wal_location(job)
wal_location = job['database_write_location'] || job['database_replica_location']
- { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => wal_location } if wal_location
+ { ::Gitlab::Database::MAIN_DATABASE_NAME.to_sym => wal_location } if wal_location
end
def load_balancing_available?(worker_class)
@@ -90,7 +90,7 @@ module Gitlab
end
def databases_in_sync?(wal_locations)
- LoadBalancing.each_load_balancer.all? do |lb|
+ ::Gitlab::Database::LoadBalancing.each_load_balancer.all? do |lb|
if (location = wal_locations[lb.name])
lb.select_up_to_date_host(location)
else
diff --git a/lib/gitlab/database/load_balancing/sticking.rb b/lib/gitlab/database/load_balancing/sticking.rb
index df4ad18581f..834e9c6d3c6 100644
--- a/lib/gitlab/database/load_balancing/sticking.rb
+++ b/lib/gitlab/database/load_balancing/sticking.rb
@@ -12,7 +12,6 @@ module Gitlab
def initialize(load_balancer)
@load_balancer = load_balancer
- @model = load_balancer.configuration.model
end
# Unsticks or continues sticking the current request.
@@ -27,8 +26,8 @@ module Gitlab
def stick_or_unstick_request(env, namespace, id)
unstick_or_continue_sticking(namespace, id)
- env[RackMiddleware::STICK_OBJECT] ||= Set.new
- env[RackMiddleware::STICK_OBJECT] << [@model, namespace, id]
+ env[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT] ||= Set.new
+ env[::Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT] << [self, namespace, id]
end
# Sticks to the primary if a write was performed.
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index 9968096b1f6..7dce4fa0ce2 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -10,8 +10,6 @@ module Gitlab
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
MAX_IDENTIFIER_NAME_LENGTH = 63
-
- PERMITTED_TIMESTAMP_COLUMNS = %i[created_at updated_at deleted_at].to_set.freeze
DEFAULT_TIMESTAMP_COLUMNS = %i[created_at updated_at].freeze
# Adds `created_at` and `updated_at` columns with timezone information.
@@ -28,33 +26,23 @@ module Gitlab
# :default - The default value for the column.
# :null - When set to `true` the column will allow NULL values.
# The default is to not allow NULL values.
- # :columns - the column names to create. Must be one
- # of `Gitlab::Database::MigrationHelpers::PERMITTED_TIMESTAMP_COLUMNS`.
+ # :columns - the column names to create. Must end with `_at`.
# Default value: `DEFAULT_TIMESTAMP_COLUMNS`
#
# All options are optional.
def add_timestamps_with_timezone(table_name, options = {})
- options[:null] = false if options[:null].nil?
columns = options.fetch(:columns, DEFAULT_TIMESTAMP_COLUMNS)
- default_value = options[:default]
-
- validate_not_in_transaction!(:add_timestamps_with_timezone, 'with default value') if default_value
columns.each do |column_name|
validate_timestamp_column_name!(column_name)
- # If default value is presented, use `add_column_with_default` method instead.
- if default_value
- add_column_with_default(
- table_name,
- column_name,
- :datetime_with_timezone,
- default: default_value,
- allow_null: options[:null]
- )
- else
- add_column(table_name, column_name, :datetime_with_timezone, **options)
- end
+ add_column(
+ table_name,
+ column_name,
+ :datetime_with_timezone,
+ default: options[:default],
+ null: options[:null] || false
+ )
end
end
@@ -147,8 +135,18 @@ module Gitlab
options = options.merge({ algorithm: :concurrently })
if index_exists?(table_name, column_name, **options)
- Gitlab::AppLogger.warn "Index not created because it already exists (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}"
- return
+ name = options[:name] || index_name(table_name, column_name)
+ _, schema = table_name.to_s.split('.').reverse
+
+ if index_invalid?(name, schema: schema)
+ say "Index being recreated because the existing version was INVALID: table_name: #{table_name}, column_name: #{column_name}"
+
+ remove_concurrent_index_by_name(table_name, name)
+ else
+ say "Index not created because it already exists (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}"
+
+ return
+ end
end
disable_statement_timeout do
@@ -159,6 +157,23 @@ module Gitlab
unprepare_async_index(table_name, column_name, **options)
end
+ def index_invalid?(index_name, schema: nil)
+ index_name = connection.quote(index_name)
+ schema = connection.quote(schema) if schema
+ schema ||= 'current_schema()'
+
+ connection.select_value(<<~SQL)
+ select not i.indisvalid
+ from pg_class c
+ inner join pg_index i
+ on c.oid = i.indexrelid
+ inner join pg_namespace n
+ on n.oid = c.relnamespace
+ where n.nspname = #{schema}
+ and c.relname = #{index_name}
+ SQL
+ end
+
# Removes an existing index, concurrently
#
# Example:
@@ -1245,8 +1260,8 @@ module Gitlab
def check_trigger_permissions!(table)
unless Grant.create_and_execute_trigger?(table)
- dbname = Database.main.database_name
- user = Database.main.username
+ dbname = ApplicationRecord.database.database_name
+ user = ApplicationRecord.database.username
raise <<-EOF
Your database user is not allowed to create, drop, or execute triggers on the
@@ -1568,8 +1583,8 @@ into similar problems in the future (e.g. when new tables are created).
def create_extension(extension)
execute('CREATE EXTENSION IF NOT EXISTS %s' % extension)
rescue ActiveRecord::StatementInvalid => e
- dbname = Database.main.database_name
- user = Database.main.username
+ dbname = ApplicationRecord.database.database_name
+ user = ApplicationRecord.database.username
warn(<<~MSG) if e.to_s =~ /permission denied/
GitLab requires the PostgreSQL extension '#{extension}' installed in database '#{dbname}', but
@@ -1596,8 +1611,8 @@ into similar problems in the future (e.g. when new tables are created).
def drop_extension(extension)
execute('DROP EXTENSION IF EXISTS %s' % extension)
rescue ActiveRecord::StatementInvalid => e
- dbname = Database.main.database_name
- user = Database.main.username
+ dbname = ApplicationRecord.database.database_name
+ user = ApplicationRecord.database.username
warn(<<~MSG) if e.to_s =~ /permission denied/
This migration attempts to drop the PostgreSQL extension '#{extension}'
@@ -1791,11 +1806,11 @@ into similar problems in the future (e.g. when new tables are created).
end
def validate_timestamp_column_name!(column_name)
- return if PERMITTED_TIMESTAMP_COLUMNS.member?(column_name)
+ return if column_name.to_s.end_with?('_at')
raise <<~MESSAGE
Illegal timestamp column name! Got #{column_name}.
- Must be one of: #{PERMITTED_TIMESTAMP_COLUMNS.to_a}
+ Must end with `_at`
MESSAGE
end
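
With PERMITTED_TIMESTAMP_COLUMNS removed, any column name ending in `_at` is now accepted, and defaults are handed straight to add_column instead of the old add_column_with_default branch. A hypothetical migration snippet illustrating the relaxed rule (the table, column, and class names are made up):

    class AddVerifiedAtToTestWidgets < Gitlab::Database::Migration[1.0]
      def change
        # `verified_at` is valid because it ends with `_at`; previously only
        # created_at/updated_at/deleted_at were permitted.
        add_timestamps_with_timezone(:_test_widgets, columns: [:created_at, :verified_at], null: true)
      end
    end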
diff --git a/lib/gitlab/database/migration_helpers/cascading_namespace_settings.rb b/lib/gitlab/database/migration_helpers/cascading_namespace_settings.rb
index d9ef5ab462e..8a37e619285 100644
--- a/lib/gitlab/database/migration_helpers/cascading_namespace_settings.rb
+++ b/lib/gitlab/database/migration_helpers/cascading_namespace_settings.rb
@@ -31,10 +31,10 @@ module Gitlab
namespace_options = options.merge(null: true, default: nil)
- add_column(:namespace_settings, setting_name, type, namespace_options)
+ add_column(:namespace_settings, setting_name, type, **namespace_options)
add_column(:namespace_settings, lock_column_name, :boolean, default: false, null: false)
- add_column(:application_settings, setting_name, type, options)
+ add_column(:application_settings, setting_name, type, **options)
add_column(:application_settings, lock_column_name, :boolean, default: false, null: false)
end
diff --git a/lib/gitlab/database/migrations/observation.rb b/lib/gitlab/database/migrations/observation.rb
index 54eedec3c7b..a494c357950 100644
--- a/lib/gitlab/database/migrations/observation.rb
+++ b/lib/gitlab/database/migrations/observation.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -14,3 +15,5 @@ module Gitlab
end
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/database/migrations/observers.rb b/lib/gitlab/database/migrations/observers.rb
index 140b3feed64..b890e62c2d0 100644
--- a/lib/gitlab/database/migrations/observers.rb
+++ b/lib/gitlab/database/migrations/observers.rb
@@ -9,7 +9,8 @@ module Gitlab
TotalDatabaseSizeChange,
QueryStatistics,
QueryLog,
- QueryDetails
+ QueryDetails,
+ TransactionDuration
]
end
end
diff --git a/lib/gitlab/database/migrations/observers/transaction_duration.rb b/lib/gitlab/database/migrations/observers/transaction_duration.rb
new file mode 100644
index 00000000000..a96b94334cf
--- /dev/null
+++ b/lib/gitlab/database/migrations/observers/transaction_duration.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Migrations
+ module Observers
+ class TransactionDuration < MigrationObserver
+ def before
+ file_path = File.join(output_dir, "#{observation.version}_#{observation.name}-transaction-duration.json")
+ @file = File.open(file_path, 'wb')
+ @writer = Oj::StreamWriter.new(@file, {})
+ @writer.push_array
+ @subscriber = ActiveSupport::Notifications.subscribe('transaction.active_record') do |*args|
+ record_sql_event(*args)
+ end
+ end
+
+ def after
+ ActiveSupport::Notifications.unsubscribe(@subscriber)
+ @writer.pop_all
+ @writer.flush
+ @file.close
+ end
+
+ def record
+ # no-op
+ end
+
+ def record_sql_event(_name, started, finished, _unique_id, payload)
+ return if payload[:transaction_type] == :fake_transaction
+
+ @writer.push_value({
+ start_time: started.iso8601(6),
+ end_time: finished.iso8601(6),
+ transaction_type: payload[:transaction_type]
+ })
+ end
+ end
+ end
+ end
+ end
+end
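The observer streams one JSON array per observed migration; a hedged sketch of reading that output back (the file name below is illustrative):

    require 'json'
    require 'time'

    entries = JSON.parse(File.read('tmp/migration-testing/20211118000000_some_migration-transaction-duration.json'))
    entries.each do |entry|
      duration = Time.parse(entry['end_time']) - Time.parse(entry['start_time'])
      puts "#{entry['transaction_type']}: #{duration.round(4)}s"
    end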
diff --git a/lib/gitlab/database/partitioning.rb b/lib/gitlab/database/partitioning.rb
index 71fb995577a..1343354715a 100644
--- a/lib/gitlab/database/partitioning.rb
+++ b/lib/gitlab/database/partitioning.rb
@@ -3,20 +3,83 @@
module Gitlab
module Database
module Partitioning
- def self.register_models(models)
- registered_models.merge(models)
- end
+ class TableWithoutModel
+ include PartitionedTable::ClassMethods
- def self.registered_models
- @registered_models ||= Set.new
- end
+ attr_reader :table_name
+
+ def initialize(table_name:, partitioned_column:, strategy:)
+ @table_name = table_name
+ partitioned_by(partitioned_column, strategy: strategy)
+ end
- def self.sync_partitions(models_to_sync = registered_models)
- MultiDatabasePartitionManager.new(models_to_sync).sync_partitions
+ def connection
+ Gitlab::Database::SharedModel.connection
+ end
end
- def self.drop_detached_partitions
- MultiDatabasePartitionDropper.new.drop_detached_partitions
+ class << self
+ def register_models(models)
+ models.each do |model|
+ raise "#{model} should have partitioning strategy defined" unless model.respond_to?(:partitioning_strategy)
+
+ registered_models << model
+ end
+ end
+
+ def register_tables(tables)
+ registered_tables.merge(tables)
+ end
+
+ def sync_partitions_ignore_db_error
+ sync_partitions unless ENV['DISABLE_POSTGRES_PARTITION_CREATION_ON_STARTUP']
+ rescue ActiveRecord::ActiveRecordError, PG::Error
+ # ignore - happens when Rake tasks have yet to create a database, e.g. for testing
+ end
+
+ def sync_partitions(models_to_sync = registered_for_sync)
+ Gitlab::AppLogger.info(message: 'Syncing dynamic postgres partitions')
+
+ Gitlab::Database::EachDatabase.each_model_connection(models_to_sync) do |model|
+ PartitionManager.new(model).sync_partitions
+ end
+
+ Gitlab::AppLogger.info(message: 'Finished sync of dynamic postgres partitions')
+ end
+
+ def report_metrics(models_to_monitor = registered_models)
+ partition_monitoring = PartitionMonitoring.new
+
+ Gitlab::Database::EachDatabase.each_model_connection(models_to_monitor) do |model|
+ partition_monitoring.report_metrics_for_model(model)
+ end
+ end
+
+ def drop_detached_partitions
+ Gitlab::AppLogger.info(message: 'Dropping detached postgres partitions')
+
+ Gitlab::Database::EachDatabase.each_database_connection do
+ DetachedPartitionDropper.new.perform
+ end
+
+ Gitlab::AppLogger.info(message: 'Finished dropping detached postgres partitions')
+ end
+
+ def registered_models
+ @registered_models ||= Set.new
+ end
+
+ def registered_tables
+ @registered_tables ||= Set.new
+ end
+
+ private
+
+ def registered_for_sync
+ registered_models + registered_tables.map do |table|
+ TableWithoutModel.new(**table)
+ end
+ end
end
end
end
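A hedged sketch of how an initializer might use the reworked registration API; `WebHookLog` already defines a partitioning strategy in GitLab, but the table entry below is purely illustrative.

    Gitlab::Database::Partitioning.register_models([WebHookLog])

    Gitlab::Database::Partitioning.register_tables([
      { table_name: 'audit_events_archived', partitioned_column: :created_at, strategy: :monthly }
    ])

    # Creates any missing partitions for registered models and tables,
    # swallowing database errors during early boot (e.g. rake tasks before db:create).
    Gitlab::Database::Partitioning.sync_partitions_ignore_db_error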
diff --git a/lib/gitlab/database/partitioning/detached_partition_dropper.rb b/lib/gitlab/database/partitioning/detached_partition_dropper.rb
index 3e7ddece20b..593824384b5 100644
--- a/lib/gitlab/database/partitioning/detached_partition_dropper.rb
+++ b/lib/gitlab/database/partitioning/detached_partition_dropper.rb
@@ -9,13 +9,10 @@ module Gitlab
Gitlab::AppLogger.info(message: "Checking for previously detached partitions to drop")
Postgresql::DetachedPartition.ready_to_drop.find_each do |detached_partition|
- connection.transaction do
- # Another process may have already dropped the table and deleted this entry
- next unless (detached_partition = Postgresql::DetachedPartition.lock.find_by(id: detached_partition.id))
-
- drop_detached_partition(detached_partition.table_name)
-
- detached_partition.destroy!
+ if partition_attached?(qualify_partition_name(detached_partition.table_name))
+ unmark_partition(detached_partition)
+ else
+ drop_partition(detached_partition)
end
rescue StandardError => e
Gitlab::AppLogger.error(message: "Failed to drop previously detached partition",
@@ -27,31 +24,100 @@ module Gitlab
private
+ def unmark_partition(detached_partition)
+ connection.transaction do
+ # Another process may have already encountered this case and deleted this entry
+ next unless try_lock_detached_partition(detached_partition.id)
+
+ # The current partition was scheduled for deletion incorrectly
+ # Dropping it now could delete in-use data and take locks that interrupt other database activity
+ Gitlab::AppLogger.error(message: "Prevented an attempt to drop an attached database partition", partition_name: detached_partition.table_name)
+ detached_partition.destroy!
+ end
+ end
+
+ def drop_partition(detached_partition)
+ remove_foreign_keys(detached_partition)
+
+ connection.transaction do
+ # Another process may have already dropped the table and deleted this entry
+ next unless try_lock_detached_partition(detached_partition.id)
+
+ drop_detached_partition(detached_partition.table_name)
+
+ detached_partition.destroy!
+ end
+ end
+
+ def remove_foreign_keys(detached_partition)
+ partition_identifier = qualify_partition_name(detached_partition.table_name)
+
+ # We want to load all of these into memory at once to get a consistent view to loop over,
+ # since we'll be deleting from this list as we go
+ fks_to_drop = PostgresForeignKey.by_constrained_table_identifier(partition_identifier).to_a
+ fks_to_drop.each do |foreign_key|
+ drop_foreign_key_if_present(detached_partition, foreign_key)
+ end
+ end
+
+ # Drops the given foreign key for the given detached partition, but only if another process has not already
+ # detached the partition first. This method must be safe to call even if the associated partition table has already
+ # been detached, as it could be called by multiple processes at once.
+ def drop_foreign_key_if_present(detached_partition, foreign_key)
+ # It is important to only drop one foreign key per transaction.
+ # Dropping a foreign key takes an ACCESS EXCLUSIVE lock on both tables participating in the foreign key.
+
+ partition_identifier = qualify_partition_name(detached_partition.table_name)
+ with_lock_retries do
+ connection.transaction(requires_new: false) do
+ next unless try_lock_detached_partition(detached_partition.id)
+
+ # Another process may have already dropped this foreign key
+ next unless PostgresForeignKey.by_constrained_table_identifier(partition_identifier).where(name: foreign_key.name).exists?
+
+ connection.execute("ALTER TABLE #{connection.quote_table_name(partition_identifier)} DROP CONSTRAINT #{connection.quote_table_name(foreign_key.name)}")
+
+ Gitlab::AppLogger.info(message: "Dropped foreign key for previously detached partition",
+ partition_name: detached_partition.table_name,
+ referenced_table_name: foreign_key.referenced_table_identifier,
+ foreign_key_name: foreign_key.name)
+ end
+ end
+ end
+
def drop_detached_partition(partition_name)
partition_identifier = qualify_partition_name(partition_name)
- if partition_detached?(partition_identifier)
- connection.drop_table(partition_identifier, if_exists: true)
+ connection.drop_table(partition_identifier, if_exists: true)
- Gitlab::AppLogger.info(message: "Dropped previously detached partition", partition_name: partition_name)
- else
- Gitlab::AppLogger.error(message: "Attempt to drop attached database partition", partition_name: partition_name)
- end
+ Gitlab::AppLogger.info(message: "Dropped previously detached partition", partition_name: partition_name)
end
def qualify_partition_name(table_name)
"#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{table_name}"
end
- def partition_detached?(partition_identifier)
+ def partition_attached?(partition_identifier)
# PostgresPartition checks the pg_inherits view, so our partition will only show here if it's still attached
# and thus should not be dropped
- !Gitlab::Database::PostgresPartition.for_identifier(partition_identifier).exists?
+ Gitlab::Database::PostgresPartition.for_identifier(partition_identifier).exists?
+ end
+
+ def try_lock_detached_partition(id)
+ Postgresql::DetachedPartition.lock.find_by(id: id).present?
end
def connection
Postgresql::DetachedPartition.connection
end
+
+ def with_lock_retries(&block)
+ Gitlab::Database::WithLockRetries.new(
+ klass: self.class,
+ logger: Gitlab::AppLogger,
+ connection: connection
+ ).run(raise_on_exhaustion: true, &block)
+ end
end
end
end
diff --git a/lib/gitlab/database/partitioning/monthly_strategy.rb b/lib/gitlab/database/partitioning/monthly_strategy.rb
index 4cdde5bf2f1..c93e775d7ed 100644
--- a/lib/gitlab/database/partitioning/monthly_strategy.rb
+++ b/lib/gitlab/database/partitioning/monthly_strategy.rb
@@ -96,10 +96,6 @@ module Gitlab
def oldest_active_date
(Date.today - retain_for).beginning_of_month
end
-
- def connection
- ActiveRecord::Base.connection
- end
end
end
end
diff --git a/lib/gitlab/database/partitioning/multi_database_partition_dropper.rb b/lib/gitlab/database/partitioning/multi_database_partition_dropper.rb
deleted file mode 100644
index 769b658bae4..00000000000
--- a/lib/gitlab/database/partitioning/multi_database_partition_dropper.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Database
- module Partitioning
- class MultiDatabasePartitionDropper
- def drop_detached_partitions
- Gitlab::AppLogger.info(message: "Dropping detached postgres partitions")
-
- each_database_connection do |name, connection|
- Gitlab::Database::SharedModel.using_connection(connection) do
- Gitlab::AppLogger.debug(message: "Switched database connection", connection_name: name)
-
- DetachedPartitionDropper.new.perform
- end
- end
-
- Gitlab::AppLogger.info(message: "Finished dropping detached postgres partitions")
- end
-
- private
-
- def each_database_connection
- databases.each_pair do |name, connection_wrapper|
- yield name, connection_wrapper.scope.connection
- end
- end
-
- def databases
- Gitlab::Database.databases
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/database/partitioning/multi_database_partition_manager.rb b/lib/gitlab/database/partitioning/multi_database_partition_manager.rb
deleted file mode 100644
index 5a93e3fb1fb..00000000000
--- a/lib/gitlab/database/partitioning/multi_database_partition_manager.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Database
- module Partitioning
- class MultiDatabasePartitionManager
- def initialize(models)
- @models = models
- end
-
- def sync_partitions
- Gitlab::AppLogger.info(message: "Syncing dynamic postgres partitions")
-
- models.each do |model|
- Gitlab::Database::SharedModel.using_connection(model.connection) do
- Gitlab::AppLogger.debug(message: "Switched database connection",
- connection_name: connection_name,
- table_name: model.table_name)
-
- PartitionManager.new(model).sync_partitions
- end
- end
-
- Gitlab::AppLogger.info(message: "Finished sync of dynamic postgres partitions")
- end
-
- private
-
- attr_reader :models
-
- def connection_name
- Gitlab::Database::SharedModel.connection.pool.db_config.name
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/database/partitioning/partition_monitoring.rb b/lib/gitlab/database/partitioning/partition_monitoring.rb
index e5b561fc447..1a23f58285d 100644
--- a/lib/gitlab/database/partitioning/partition_monitoring.rb
+++ b/lib/gitlab/database/partitioning/partition_monitoring.rb
@@ -4,20 +4,12 @@ module Gitlab
module Database
module Partitioning
class PartitionMonitoring
- attr_reader :models
+ def report_metrics_for_model(model)
+ strategy = model.partitioning_strategy
- def initialize(models = Gitlab::Database::Partitioning.registered_models)
- @models = models
- end
-
- def report_metrics
- models.each do |model|
- strategy = model.partitioning_strategy
-
- gauge_present.set({ table: model.table_name }, strategy.current_partitions.size)
- gauge_missing.set({ table: model.table_name }, strategy.missing_partitions.size)
- gauge_extra.set({ table: model.table_name }, strategy.extra_partitions.size)
- end
+ gauge_present.set({ table: model.table_name }, strategy.current_partitions.size)
+ gauge_missing.set({ table: model.table_name }, strategy.missing_partitions.size)
+ gauge_extra.set({ table: model.table_name }, strategy.extra_partitions.size)
end
private
diff --git a/lib/gitlab/database/partitioning/replace_table.rb b/lib/gitlab/database/partitioning/replace_table.rb
index 6f6af223fa2..a7686e97553 100644
--- a/lib/gitlab/database/partitioning/replace_table.rb
+++ b/lib/gitlab/database/partitioning/replace_table.rb
@@ -9,7 +9,8 @@ module Gitlab
attr_reader :original_table, :replacement_table, :replaced_table, :primary_key_column,
:sequence, :original_primary_key, :replacement_primary_key, :replaced_primary_key
- def initialize(original_table, replacement_table, replaced_table, primary_key_column)
+ def initialize(connection, original_table, replacement_table, replaced_table, primary_key_column)
+ @connection = connection
@original_table = original_table
@replacement_table = replacement_table
@replaced_table = replaced_table
@@ -29,10 +30,8 @@ module Gitlab
private
+ attr_reader :connection
delegate :execute, :quote_table_name, :quote_column_name, to: :connection
- def connection
- @connection ||= ActiveRecord::Base.connection
- end
def default_sequence(table, column)
"#{table}_#{column}_seq"
diff --git a/lib/gitlab/database/partitioning/time_partition.rb b/lib/gitlab/database/partitioning/time_partition.rb
index e09ca483549..649687bdd12 100644
--- a/lib/gitlab/database/partitioning/time_partition.rb
+++ b/lib/gitlab/database/partitioning/time_partition.rb
@@ -87,7 +87,7 @@ module Gitlab
end
def conn
- @conn ||= ActiveRecord::Base.connection
+ @conn ||= Gitlab::Database::SharedModel.connection
end
end
end
diff --git a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
index 0dc9f92e4c8..c382d2f0715 100644
--- a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
+++ b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
@@ -428,8 +428,8 @@ module Gitlab
end
def replace_table(original_table_name, replacement_table_name, replaced_table_name, primary_key_name)
- replace_table = Gitlab::Database::Partitioning::ReplaceTable.new(original_table_name.to_s,
- replacement_table_name, replaced_table_name, primary_key_name)
+ replace_table = Gitlab::Database::Partitioning::ReplaceTable.new(connection,
+ original_table_name.to_s, replacement_table_name, replaced_table_name, primary_key_name)
transaction do
drop_sync_trigger(original_table_name)
diff --git a/lib/gitlab/database/postgres_foreign_key.rb b/lib/gitlab/database/postgres_foreign_key.rb
index 72640f8785d..241b6f009f7 100644
--- a/lib/gitlab/database/postgres_foreign_key.rb
+++ b/lib/gitlab/database/postgres_foreign_key.rb
@@ -10,6 +10,12 @@ module Gitlab
where(referenced_table_identifier: identifier)
end
+
+ scope :by_constrained_table_identifier, ->(identifier) do
+ raise ArgumentError, "Constrained table name is not fully qualified with a schema: #{identifier}" unless identifier =~ /^\w+\.\w+$/
+
+ where(constrained_table_identifier: identifier)
+ end
end
end
end
diff --git a/lib/gitlab/database/postgres_hll/batch_distinct_counter.rb b/lib/gitlab/database/postgres_hll/batch_distinct_counter.rb
index 2e3f674cf82..4e973efebca 100644
--- a/lib/gitlab/database/postgres_hll/batch_distinct_counter.rb
+++ b/lib/gitlab/database/postgres_hll/batch_distinct_counter.rb
@@ -57,7 +57,7 @@ module Gitlab
# @param finish final pkey range
# @return [Gitlab::Database::PostgresHll::Buckets] HyperLogLog data structure instance that can estimate number of unique elements
def execute(batch_size: nil, start: nil, finish: nil)
- raise 'BatchCount can not be run inside a transaction' if ActiveRecord::Base.connection.transaction_open? # rubocop: disable Database/MultipleDatabases
+ raise 'BatchCount can not be run inside a transaction' if transaction_open?
batch_size ||= DEFAULT_BATCH_SIZE
start = actual_start(start)
@@ -79,6 +79,10 @@ module Gitlab
private
+ def transaction_open?
+ @relation.connection.transaction_open?
+ end
+
def unwanted_configuration?(start, finish, batch_size)
batch_size <= MIN_REQUIRED_BATCH_SIZE ||
(finish - start) >= MAX_DATA_VOLUME ||
diff --git a/lib/gitlab/database/postgres_index.rb b/lib/gitlab/database/postgres_index.rb
index 1079bfdeda3..4a9d8728c83 100644
--- a/lib/gitlab/database/postgres_index.rb
+++ b/lib/gitlab/database/postgres_index.rb
@@ -2,7 +2,7 @@
module Gitlab
module Database
- class PostgresIndex < ActiveRecord::Base
+ class PostgresIndex < SharedModel
include Gitlab::Utils::StrongMemoize
self.table_name = 'postgres_indexes'
@@ -11,6 +11,7 @@ module Gitlab
has_one :bloat_estimate, class_name: 'Gitlab::Database::PostgresIndexBloatEstimate', foreign_key: :identifier
has_many :reindexing_actions, class_name: 'Gitlab::Database::Reindexing::ReindexAction', foreign_key: :index_identifier
+ has_many :queued_reindexing_actions, class_name: 'Gitlab::Database::Reindexing::QueuedAction', foreign_key: :index_identifier
scope :by_identifier, ->(identifier) do
raise ArgumentError, "Index name is not fully qualified with a schema: #{identifier}" unless identifier =~ /^\w+\.\w+$/
diff --git a/lib/gitlab/database/postgres_index_bloat_estimate.rb b/lib/gitlab/database/postgres_index_bloat_estimate.rb
index 379227bf87c..5c9b5777b74 100644
--- a/lib/gitlab/database/postgres_index_bloat_estimate.rb
+++ b/lib/gitlab/database/postgres_index_bloat_estimate.rb
@@ -6,7 +6,7 @@ module Gitlab
# for all indexes can be expensive in a large database.
#
# Best used on a per-index basis.
- class PostgresIndexBloatEstimate < ActiveRecord::Base
+ class PostgresIndexBloatEstimate < SharedModel
self.table_name = 'postgres_index_bloat_estimates'
self.primary_key = 'identifier'
diff --git a/lib/gitlab/database/query_analyzer.rb b/lib/gitlab/database/query_analyzer.rb
new file mode 100644
index 00000000000..0f285688876
--- /dev/null
+++ b/lib/gitlab/database/query_analyzer.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ # The purpose of this class is to implement various query analyzers based on `pg_query`
+ # and process them all via `Gitlab::Database::QueryAnalyzers::*`
+ #
+ # Sometimes this might cause errors in specs.
+ # It is best to disable this with `describe '...', query_analyzers: false do`
+ class QueryAnalyzer
+ include ::Singleton
+
+ Parsed = Struct.new(
+ :sql, :connection, :pg
+ )
+
+ attr_reader :all_analyzers
+
+ def initialize
+ @all_analyzers = []
+ end
+
+ def hook!
+ @subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |event|
+ # In some cases analyzer code might trigger another SQL call;
+ # to avoid a "stack too deep" error this detects recursive calls of the subscriber
+ with_ignored_recursive_calls do
+ process_sql(event.payload[:sql], event.payload[:connection])
+ end
+ end
+ end
+
+ def within
+ # Due to the singleton nature of analyzers,
+ # only an outer invocation of `.within`
+ # is allowed to initialize them
+ return yield if already_within?
+
+ begin!
+
+ begin
+ yield
+ ensure
+ end!
+ end
+ end
+
+ def already_within?
+ # If analyzers are set they are already configured
+ !enabled_analyzers.nil?
+ end
+
+ def process_sql(sql, connection)
+ analyzers = enabled_analyzers
+ return unless analyzers&.any?
+
+ parsed = parse(sql, connection)
+ return unless parsed
+
+ analyzers.each do |analyzer|
+ next if analyzer.suppressed?
+
+ analyzer.analyze(parsed)
+ rescue StandardError => e
+ # We catch all standard errors to prevent validation errors from introducing fatal errors in production
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
+ end
+ end
+
+ private
+
+ # Enable query analyzers
+ def begin!
+ analyzers = all_analyzers.select do |analyzer|
+ if analyzer.enabled?
+ analyzer.begin!
+
+ true
+ end
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
+
+ false
+ end
+
+ Thread.current[:query_analyzer_enabled_analyzers] = analyzers
+ end
+
+ # Disable enabled query analyzers
+ def end!
+ enabled_analyzers.select do |analyzer|
+ analyzer.end!
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
+ end
+
+ Thread.current[:query_analyzer_enabled_analyzers] = nil
+ end
+
+ def enabled_analyzers
+ Thread.current[:query_analyzer_enabled_analyzers]
+ end
+
+ def parse(sql, connection)
+ parsed = PgQuery.parse(sql)
+ return unless parsed
+
+ normalized = PgQuery.normalize(sql)
+ Parsed.new(normalized, connection, parsed)
+ rescue PgQuery::ParseError => e
+ # Ignore PgQuery parse errors (due to depth limit or other reasons)
+ Gitlab::ErrorTracking.track_exception(e)
+
+ nil
+ end
+
+ def with_ignored_recursive_calls
+ return if Thread.current[:query_analyzer_recursive]
+
+ begin
+ Thread.current[:query_analyzer_recursive] = true
+ yield
+ ensure
+ Thread.current[:query_analyzer_recursive] = nil
+ end
+ end
+ end
+ end
+end
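A hedged sketch of wiring the analyzer up, e.g. from an initializer plus a request or worker wrapper; the exact call sites are assumptions, not part of this diff.

    analyzer = Gitlab::Database::QueryAnalyzer.instance
    analyzer.all_analyzers.append(Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics)
    analyzer.hook!

    analyzer.within do
      Project.count # SQL issued here is parsed with PgQuery and handed to each enabled analyzer
    end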
diff --git a/lib/gitlab/database/query_analyzers/base.rb b/lib/gitlab/database/query_analyzers/base.rb
new file mode 100644
index 00000000000..e8066f7a706
--- /dev/null
+++ b/lib/gitlab/database/query_analyzers/base.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module QueryAnalyzers
+ class Base
+ def self.suppressed?
+ Thread.current[self.suppress_key]
+ end
+
+ def self.suppress=(value)
+ Thread.current[self.suppress_key] = value
+ end
+
+ def self.with_suppressed(value = true, &blk)
+ previous = self.suppressed?
+ self.suppress = value
+ yield
+ ensure
+ self.suppress = previous
+ end
+
+ def self.begin!
+ Thread.current[self.context_key] = {}
+ end
+
+ def self.end!
+ Thread.current[self.context_key] = nil
+ end
+
+ def self.context
+ Thread.current[self.context_key]
+ end
+
+ def self.enabled?
+ raise NotImplementedError
+ end
+
+ def self.analyze(parsed)
+ raise NotImplementedError
+ end
+
+ def self.context_key
+ "#{self.name}_context"
+ end
+
+ def self.suppress_key
+ "#{self.name}_suppressed"
+ end
+ end
+ end
+ end
+end
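A minimal analyzer sketch following the Base contract above; the class and its bookkeeping are hypothetical.

    module Gitlab
      module Database
        module QueryAnalyzers
          class SelectCounter < Base
            def self.enabled?
              true
            end

            def self.analyze(parsed)
              return unless parsed.sql.start_with?('SELECT')

              # `context` is the per-thread hash initialized by begin!
              context[:selects] = context.fetch(:selects, 0) + 1
            end
          end
        end
      end
    end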
diff --git a/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics.rb b/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics.rb
new file mode 100644
index 00000000000..06e2b114c91
--- /dev/null
+++ b/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module QueryAnalyzers
+ # The purpose of this analyzer is to observe, via Prometheus metrics,
+ # all unique schemas observed on a given connection
+ #
+ # This effectively allows sampling 1% or 0.01% of queries hitting the
+ # system and observing whether a given connection sees queries that
+ # are misaligned (e.g. `ci_replica` seeing queries that access only `gitlab_main`)
+ #
+ class GitlabSchemasMetrics < Base
+ class << self
+ def enabled?
+ ::Feature::FlipperFeature.table_exists? &&
+ Feature.enabled?(:query_analyzer_gitlab_schema_metrics)
+ end
+
+ def analyze(parsed)
+ db_config_name = ::Gitlab::Database.db_config_name(parsed.connection)
+ return unless db_config_name
+
+ gitlab_schemas = ::Gitlab::Database::GitlabSchema.table_schemas(parsed.pg.tables)
+ return if gitlab_schemas.empty?
+
+ # to reduce the number of labels, sort the schemas used
+ gitlab_schemas = gitlab_schemas.to_a.sort.join(",")
+
+ schemas_metrics.increment({
+ gitlab_schemas: gitlab_schemas,
+ db_config_name: db_config_name
+ })
+ end
+
+ def schemas_metrics
+ @schemas_metrics ||= ::Gitlab::Metrics.counter(
+ :gitlab_database_decomposition_gitlab_schemas_used,
+ 'The number of observed schemas dependent on connection'
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb b/lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb
new file mode 100644
index 00000000000..2233f3c4646
--- /dev/null
+++ b/lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module QueryAnalyzers
+ class PreventCrossDatabaseModification < Database::QueryAnalyzers::Base
+ CrossDatabaseModificationAcrossUnsupportedTablesError = Class.new(StandardError)
+
+ # This method will allow cross database modifications within the block
+ # Example:
+ #
+ # allow_cross_database_modification_within_transaction(url: 'url-to-an-issue') do
+ # create(:build) # inserts ci_build and project record in one transaction
+ # end
+ def self.allow_cross_database_modification_within_transaction(url:, &blk)
+ self.with_suppressed(true, &blk)
+ end
+
+ # This method will prevent cross database modifications within the block
+ # if it was allowed previously
+ def self.with_cross_database_modification_prevented(&blk)
+ self.with_suppressed(false, &blk)
+ end
+
+ def self.begin!
+ super
+
+ context.merge!({
+ transaction_depth_by_db: Hash.new { |h, k| h[k] = 0 },
+ modified_tables_by_db: Hash.new { |h, k| h[k] = Set.new }
+ })
+ end
+
+ def self.enabled?
+ ::Feature::FlipperFeature.table_exists? &&
+ Feature.enabled?(:detect_cross_database_modification, default_enabled: :yaml)
+ end
+
+ # rubocop:disable Metrics/AbcSize
+ def self.analyze(parsed)
+ return if in_factory_bot_create?
+
+ database = ::Gitlab::Database.db_config_name(parsed.connection)
+ sql = parsed.sql
+
+ # We ignore BEGIN in tests as this is the outer transaction for
+ # DatabaseCleaner
+ if sql.start_with?('SAVEPOINT') || (!Rails.env.test? && sql.start_with?('BEGIN'))
+ context[:transaction_depth_by_db][database] += 1
+
+ return
+ elsif sql.start_with?('RELEASE SAVEPOINT', 'ROLLBACK TO SAVEPOINT') || (!Rails.env.test? && sql.start_with?('ROLLBACK', 'COMMIT'))
+ context[:transaction_depth_by_db][database] -= 1
+ if context[:transaction_depth_by_db][database] <= 0
+ context[:modified_tables_by_db][database].clear
+ end
+
+ return
+ end
+
+ return if context[:transaction_depth_by_db].values.all?(&:zero?)
+
+ # PgQuery might fail in some cases due to limited nesting:
+ # https://github.com/pganalyze/pg_query/issues/209
+ tables = sql.downcase.include?(' for update') ? parsed.pg.tables : parsed.pg.dml_tables
+
+ # We have some code where plans and gitlab_subscriptions are lazily
+ # created and this causes lots of spec failures
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/343394
+ tables -= %w[plans gitlab_subscriptions]
+
+ return if tables.empty?
+
+ # All migrations will write to schema_migrations in the same transaction.
+ # It's safe to ignore this since schema_migrations exists in all
+ # databases
+ return if tables == ['schema_migrations']
+
+ context[:modified_tables_by_db][database].merge(tables)
+ all_tables = context[:modified_tables_by_db].values.map(&:to_a).flatten
+ schemas = ::Gitlab::Database::GitlabSchema.table_schemas(all_tables)
+
+ if schemas.many?
+ message = "Cross-database data modification of '#{schemas.to_a.join(", ")}' were detected within " \
+ "a transaction modifying the '#{all_tables.to_a.join(", ")}' tables." \
+ "Please refer to https://docs.gitlab.com/ee/development/database/multiple_databases.html#removing-cross-database-transactions for details on how to resolve this exception."
+
+ if schemas.any? { |s| s.to_s.start_with?("undefined") }
+ message += " The gitlab_schema was undefined for one or more of the tables in this transaction. Any new tables must be added to lib/gitlab/database/gitlab_schemas.yml ."
+ end
+
+ raise CrossDatabaseModificationAcrossUnsupportedTablesError, message
+ end
+ rescue CrossDatabaseModificationAcrossUnsupportedTablesError => e
+ ::Gitlab::ErrorTracking.track_exception(e, { gitlab_schemas: schemas, tables: all_tables, query: parsed.sql })
+ raise if raise_exception?
+ end
+ # rubocop:enable Metrics/AbcSize
+
+ # We only raise in tests for now, otherwise some features will be broken
+ # in development. For now we've mostly only added allowlists based on
+ # spec names. Until we have allowed all the violations inline we don't
+ # want to raise in development.
+ def self.raise_exception?
+ Rails.env.test?
+ end
+
+ # We ignore execution in the #create method from FactoryBot
+ # because it is not representative of real code we run in
+ # production. There are far too many false positives caused
+ # by instantiating objects in different `gitlab_schema` in a
+ # FactoryBot `create`.
+ def self.in_factory_bot_create?
+ Rails.env.test? && caller_locations.any? { |l| l.path.end_with?('lib/factory_bot/evaluation.rb') && l.label == 'create' }
+ end
+ end
+ end
+ end
+end
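A hedged sketch of the two toggles exposed above; the issue URL and block bodies are illustrative.

    Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification
      .allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/343394') do
        # legacy code path that still modifies ci_* and main tables in one transaction
      end

    Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification
      .with_cross_database_modification_prevented do
        # re-enables the check inside an allowed region, e.g. in a spec
      end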
diff --git a/lib/gitlab/database/reflection.rb b/lib/gitlab/database/reflection.rb
new file mode 100644
index 00000000000..48a4de28541
--- /dev/null
+++ b/lib/gitlab/database/reflection.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ # A class for reflecting upon a database and its settings, such as the
+ # adapter name, PostgreSQL version, and the presence of tables or columns.
+ class Reflection
+ attr_reader :model
+
+ def initialize(model)
+ @model = model
+ @version = nil
+ end
+
+ def config
+ # The result of this method must not be cached, as other methods may use
+ # it after making configuration changes and expect those changes to be
+ # present. For example, `disable_prepared_statements` expects the
+ # configuration settings to always be up to date.
+ #
+ # See the following for more information:
+ #
+ # - https://gitlab.com/gitlab-org/release/retrospectives/-/issues/39
+ # - https://gitlab.com/gitlab-com/gl-infra/production/-/issues/5238
+ model.connection_db_config.configuration_hash.with_indifferent_access
+ end
+
+ def username
+ config[:username] || ENV['USER']
+ end
+
+ def database_name
+ config[:database]
+ end
+
+ def adapter_name
+ config[:adapter]
+ end
+
+ def human_adapter_name
+ if postgresql?
+ 'PostgreSQL'
+ else
+ 'Unknown'
+ end
+ end
+
+ def postgresql?
+ adapter_name.casecmp('postgresql') == 0
+ end
+
+ # Check whether the underlying database is in read-only mode
+ def db_read_only?
+ pg_is_in_recovery =
+ connection
+ .execute('SELECT pg_is_in_recovery()')
+ .first
+ .fetch('pg_is_in_recovery')
+
+ Gitlab::Utils.to_boolean(pg_is_in_recovery)
+ end
+
+ def db_read_write?
+ !db_read_only?
+ end
+
+ def version
+ @version ||= database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
+ end
+
+ def database_version
+ connection.execute("SELECT VERSION()").first['version']
+ end
+
+ def postgresql_minimum_supported_version?
+ version.to_f >= MINIMUM_POSTGRES_VERSION
+ end
+
+ def cached_column_exists?(column_name)
+ connection
+ .schema_cache.columns_hash(model.table_name)
+ .has_key?(column_name.to_s)
+ end
+
+ def cached_table_exists?
+ exists? && connection.schema_cache.data_source_exists?(model.table_name)
+ end
+
+ def exists?
+ # We can't _just_ check if `connection` raises an error, as it will
+ # point to a `ConnectionProxy`, and obtaining those doesn't involve any
+ # database queries. So instead we obtain the database version, which is
+ # cached after the first call.
+ connection.schema_cache.database_version
+ true
+ rescue StandardError
+ false
+ end
+
+ def system_id
+ row = connection
+ .execute('SELECT system_identifier FROM pg_control_system()')
+ .first
+
+ row['system_identifier']
+ end
+
+ private
+
+ def connection
+ model.connection
+ end
+ end
+ end
+end
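A hedged sketch of querying database properties through the new Reflection class; the return values shown are examples only.

    reflection = Gitlab::Database::Reflection.new(ApplicationRecord)

    reflection.postgresql?       # => true on a PostgreSQL adapter
    reflection.version           # => e.g. "12.7"
    reflection.db_read_only?     # => true while the database is in recovery
    Gitlab::Database::Reflection.new(Project).cached_table_exists? # checks via the schema cache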
diff --git a/lib/gitlab/database/reindexing.rb b/lib/gitlab/database/reindexing.rb
index 04b409a9306..7a22e324bdb 100644
--- a/lib/gitlab/database/reindexing.rb
+++ b/lib/gitlab/database/reindexing.rb
@@ -15,25 +15,58 @@ module Gitlab
# on e.g. vacuum.
REMOVE_INDEX_RETRY_CONFIG = [[1.minute, 9.minutes]] * 30
- # candidate_indexes: Array of Gitlab::Database::PostgresIndex
- def self.perform(candidate_indexes, how_many: DEFAULT_INDEXES_PER_INVOCATION)
- IndexSelection.new(candidate_indexes).take(how_many).each do |index|
+ # Performs automatic reindexing for a limited number of indexes per call
+ # 1. Consume from the explicit reindexing queue
+ # 2. Apply bloat heuristic to find most bloated indexes and reindex those
+ def self.automatic_reindexing(maximum_records: DEFAULT_INDEXES_PER_INVOCATION)
+ # Clean up leftover temporary indexes from previous, possibly aborted runs (if any)
+ cleanup_leftovers!
+
+ # Consume from the explicit reindexing queue first
+ done_counter = perform_from_queue(maximum_records: maximum_records)
+
+ return if done_counter >= maximum_records
+
+ # Execute reindexing based on bloat heuristic
+ perform_with_heuristic(maximum_records: maximum_records - done_counter)
+ end
+
+ # Reindex based on bloat heuristic for a limited number of indexes per call
+ #
+ # We use a bloat heuristic to estimate the index bloat and pick the
+ # most bloated indexes for reindexing.
+ def self.perform_with_heuristic(candidate_indexes = Gitlab::Database::PostgresIndex.reindexing_support, maximum_records: DEFAULT_INDEXES_PER_INVOCATION)
+ IndexSelection.new(candidate_indexes).take(maximum_records).each do |index|
Coordinator.new(index).perform
end
end
+ # Reindex indexes that have been explicitly enqueued (for a limited number of indexes per call)
+ def self.perform_from_queue(maximum_records: DEFAULT_INDEXES_PER_INVOCATION)
+ QueuedAction.in_queue_order.limit(maximum_records).each do |queued_entry|
+ Coordinator.new(queued_entry.index).perform
+
+ queued_entry.done!
+ rescue StandardError => e
+ queued_entry.failed!
+
+ Gitlab::AppLogger.error("Failed to perform reindexing action on queued entry #{queued_entry}: #{e}")
+ end.size
+ end
+
def self.cleanup_leftovers!
PostgresIndex.reindexing_leftovers.each do |index|
Gitlab::AppLogger.info("Removing index #{index.identifier} which is a leftover, temporary index from previous reindexing activity")
retries = Gitlab::Database::WithLockRetriesOutsideTransaction.new(
+ connection: index.connection,
timing_configuration: REMOVE_INDEX_RETRY_CONFIG,
klass: self.class,
logger: Gitlab::AppLogger
)
retries.run(raise_on_exhaustion: false) do
- ApplicationRecord.connection.tap do |conn|
+ index.connection.tap do |conn|
conn.execute("DROP INDEX CONCURRENTLY IF EXISTS #{conn.quote_table_name(index.schema)}.#{conn.quote_table_name(index.name)}")
end
end
diff --git a/lib/gitlab/database/reindexing/index_selection.rb b/lib/gitlab/database/reindexing/index_selection.rb
index 2186384e7d7..2d384f2f9e2 100644
--- a/lib/gitlab/database/reindexing/index_selection.rb
+++ b/lib/gitlab/database/reindexing/index_selection.rb
@@ -9,8 +9,8 @@ module Gitlab
# Only reindex indexes with a relative bloat level (bloat estimate / size) higher than this
MINIMUM_RELATIVE_BLOAT = 0.2
- # Only consider indexes with a total ondisk size in this range (before reindexing)
- INDEX_SIZE_RANGE = (1.gigabyte..100.gigabyte).freeze
+ # Only consider indexes beyond this size (before reindexing)
+ INDEX_SIZE_MINIMUM = 1.gigabyte
delegate :each, to: :indexes
@@ -32,7 +32,7 @@ module Gitlab
@indexes ||= candidates
.not_recently_reindexed
- .where(ondisk_size_bytes: INDEX_SIZE_RANGE)
+ .where('ondisk_size_bytes >= ?', INDEX_SIZE_MINIMUM)
.sort_by(&:relative_bloat_level) # forced N+1
.reverse
.select { |candidate| candidate.relative_bloat_level >= MINIMUM_RELATIVE_BLOAT }
diff --git a/lib/gitlab/database/reindexing/queued_action.rb b/lib/gitlab/database/reindexing/queued_action.rb
new file mode 100644
index 00000000000..c2039a289da
--- /dev/null
+++ b/lib/gitlab/database/reindexing/queued_action.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Reindexing
+ class QueuedAction < SharedModel
+ self.table_name = 'postgres_reindex_queued_actions'
+
+ enum state: { queued: 0, done: 1, failed: 2 }
+
+ belongs_to :index, foreign_key: :index_identifier, class_name: 'Gitlab::Database::PostgresIndex'
+
+ scope :in_queue_order, -> { queued.order(:created_at) }
+
+ def to_s
+ "queued action [ id = #{id}, index: #{index_identifier} ]"
+ end
+ end
+ end
+ end
+end
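A hedged sketch of queueing an index explicitly and letting the new entry point drain the queue; the index identifier is illustrative.

    index = Gitlab::Database::PostgresIndex.by_identifier('public.index_users_on_username')
    Gitlab::Database::Reindexing::QueuedAction.create!(index: index)

    # Consumes queued actions first, then falls back to the bloat heuristic.
    Gitlab::Database::Reindexing.automatic_reindexing(maximum_records: 2)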
diff --git a/lib/gitlab/database/reindexing/reindex_action.rb b/lib/gitlab/database/reindexing/reindex_action.rb
index ff465fffb74..73424a76cfe 100644
--- a/lib/gitlab/database/reindexing/reindex_action.rb
+++ b/lib/gitlab/database/reindexing/reindex_action.rb
@@ -3,7 +3,7 @@
module Gitlab
module Database
module Reindexing
- class ReindexAction < ActiveRecord::Base
+ class ReindexAction < SharedModel
self.table_name = 'postgres_reindex_actions'
belongs_to :index, foreign_key: :index_identifier, class_name: 'Gitlab::Database::PostgresIndex'
diff --git a/lib/gitlab/database/reindexing/reindex_concurrently.rb b/lib/gitlab/database/reindexing/reindex_concurrently.rb
index 7a720f7c539..152935bd734 100644
--- a/lib/gitlab/database/reindexing/reindex_concurrently.rb
+++ b/lib/gitlab/database/reindexing/reindex_concurrently.rb
@@ -8,7 +8,7 @@ module Gitlab
ReindexError = Class.new(StandardError)
TEMPORARY_INDEX_PATTERN = '\_ccnew[0-9]*'
- STATEMENT_TIMEOUT = 9.hours
+ STATEMENT_TIMEOUT = 24.hours
PG_MAX_INDEX_NAME_LENGTH = 63
attr_reader :index, :logger
@@ -99,6 +99,7 @@ module Gitlab
logger.info("Removing dangling index #{index.identifier}")
retries = Gitlab::Database::WithLockRetriesOutsideTransaction.new(
+ connection: connection,
timing_configuration: REMOVE_INDEX_RETRY_CONFIG,
klass: self.class,
logger: logger
@@ -109,11 +110,6 @@ module Gitlab
end
end
- def with_lock_retries(&block)
- arguments = { klass: self.class, logger: logger }
- Gitlab::Database::WithLockRetries.new(**arguments).run(raise_on_exhaustion: true, &block)
- end
-
def set_statement_timeout
execute("SET statement_timeout TO '%ds'" % STATEMENT_TIMEOUT)
yield
@@ -123,7 +119,7 @@ module Gitlab
delegate :execute, :quote_table_name, to: :connection
def connection
- @connection ||= ActiveRecord::Base.connection
+ @connection ||= index.connection
end
end
end
diff --git a/lib/gitlab/database/shared_model.rb b/lib/gitlab/database/shared_model.rb
index f304c32d731..f31dbc01907 100644
--- a/lib/gitlab/database/shared_model.rb
+++ b/lib/gitlab/database/shared_model.rb
@@ -8,13 +8,17 @@ module Gitlab
class << self
def using_connection(connection)
- raise 'cannot nest connection overrides for shared models' unless overriding_connection.nil?
+ previous_connection = self.overriding_connection
+
+ unless previous_connection.nil? || previous_connection.equal?(connection)
+ raise 'cannot nest connection overrides for shared models with different connections'
+ end
self.overriding_connection = connection
yield
ensure
- self.overriding_connection = nil
+ self.overriding_connection = nil unless previous_connection.equal?(self.overriding_connection)
end
def connection
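A hedged sketch of the relaxed nesting rule; `ci_connection` and `main_connection` are placeholders for two distinct connection objects.

    Gitlab::Database::SharedModel.using_connection(ci_connection) do
      Gitlab::Database::SharedModel.using_connection(ci_connection) do
        # allowed: same connection object, the existing override is kept
      end

      Gitlab::Database::SharedModel.using_connection(main_connection) do
        # raises: nesting with a different connection is still rejected
      end
    end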
diff --git a/lib/gitlab/database/unidirectional_copy_trigger.rb b/lib/gitlab/database/unidirectional_copy_trigger.rb
index 029c894a5ff..146b5cacd9e 100644
--- a/lib/gitlab/database/unidirectional_copy_trigger.rb
+++ b/lib/gitlab/database/unidirectional_copy_trigger.rb
@@ -3,7 +3,7 @@
module Gitlab
module Database
class UnidirectionalCopyTrigger
- def self.on_table(table_name, connection: ActiveRecord::Base.connection)
+ def self.on_table(table_name, connection:)
new(table_name, connection)
end
diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb
index 1e6d80e1100..83f242ff902 100644
--- a/lib/gitlab/diff/file.rb
+++ b/lib/gitlab/diff/file.rb
@@ -43,6 +43,8 @@ module Gitlab
# Ensure items are collected in the batch
new_blob_lazy
old_blob_lazy
+
+ preprocess_before_diff(diff) if Feature.enabled?(:jupyter_clean_diffs, repository.project, default_enabled: true)
end
def position(position_marker, position_type: :text)
@@ -448,6 +450,33 @@ module Gitlab
find_renderable_viewer_class(classes)
end
+ def preprocess_before_diff(diff)
+ return unless diff.new_path.ends_with? '.ipynb'
+
+ from = old_blob_lazy&.data
+ to = new_blob_lazy&.data
+
+ transformed_diff = IpynbDiff.diff(from, to,
+ diff_opts: { context: 5, include_diff_info: true },
+ transform_options: { cell_decorator: :percent },
+ raise_if_invalid_notebook: true)
+ new_diff = strip_diff_frontmatter(transformed_diff)
+
+ if new_diff
+ diff.diff = new_diff
+ new_blob_lazy.transformed_for_diff = true if new_blob_lazy
+ old_blob_lazy.transformed_for_diff = true if old_blob_lazy
+ end
+
+ Gitlab::AppLogger.info({ message: new_diff ? 'IPYNB_DIFF_GENERATED' : 'IPYNB_DIFF_NIL' })
+ rescue IpynbDiff::InvalidNotebookError => e
+ Gitlab::ErrorTracking.log_exception(e)
+ end
+
+ def strip_diff_frontmatter(diff_content)
+ diff_content.scan(/.*\n/)[2..-1]&.join('') if diff_content.present?
+ end
+
def alternate_viewer_class
return unless viewer.instance_of?(DiffViewer::Renamed)
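A hedged illustration of strip_diff_frontmatter, assuming the transformed notebook diff begins with two header lines before the hunks:

    content = "--- a/notebook.ipynb\n+++ b/notebook.ipynb\n@@ -1,1 +1,1 @@\n-old cell\n+new cell\n"
    content.scan(/.*\n/)[2..-1]&.join('') # => "@@ -1,1 +1,1 @@\n-old cell\n+new cell\n"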
diff --git a/lib/gitlab/diff/highlight.rb b/lib/gitlab/diff/highlight.rb
index 32ce35110f8..aedcfe3cb40 100644
--- a/lib/gitlab/diff/highlight.rb
+++ b/lib/gitlab/diff/highlight.rb
@@ -152,6 +152,9 @@ module Gitlab
return [] unless blob
blob.load_all_data!
+
+ return blob.present.highlight_transformed.lines if Feature.enabled?(:jupyter_clean_diffs, @project, default_enabled: true)
+
blob.present.highlight.lines
end
diff --git a/lib/gitlab/diff/highlight_cache.rb b/lib/gitlab/diff/highlight_cache.rb
index 075027ebdc8..12ed11b0140 100644
--- a/lib/gitlab/diff/highlight_cache.rb
+++ b/lib/gitlab/diff/highlight_cache.rb
@@ -213,7 +213,7 @@ module Gitlab
end
def current_transaction
- ::Gitlab::Metrics::Transaction.current
+ ::Gitlab::Metrics::WebTransaction.current
end
end
end
diff --git a/lib/gitlab/diff/position_tracer/line_strategy.rb b/lib/gitlab/diff/position_tracer/line_strategy.rb
index 8bacc781f61..0f0b8f0c4f3 100644
--- a/lib/gitlab/diff/position_tracer/line_strategy.rb
+++ b/lib/gitlab/diff/position_tracer/line_strategy.rb
@@ -104,7 +104,7 @@ module Gitlab
# the current state on the CD diff, so we treat it as outdated.
ac_diff = ac_diffs.diff_file_with_new_path(c_path, c_mode)
- { position: new_position(ac_diff, nil, c_line), outdated: true }
+ { position: new_position(ac_diff, nil, c_line, position.line_range), outdated: true }
end
else
# If the line is still in D and not in C, it is still added.
@@ -112,7 +112,7 @@ module Gitlab
end
else
# If the line is no longer in D, it has been removed from the MR.
- { position: new_position(bd_diff, b_line, nil), outdated: true }
+ { position: new_position(bd_diff, b_line, nil, position.line_range), outdated: true }
end
end
@@ -140,14 +140,14 @@ module Gitlab
# removed line into an unchanged one.
bd_diff = bd_diffs.diff_file_with_new_path(d_path, d_mode)
- { position: new_position(bd_diff, nil, d_line), outdated: true }
+ { position: new_position(bd_diff, nil, d_line, position.line_range), outdated: true }
else
# If the line is still in C and not in D, it is still removed.
{ position: new_position(cd_diff, c_line, nil, position.line_range), outdated: false }
end
else
# If the line is no longer in C, it has been removed outside of the MR.
- { position: new_position(ac_diff, a_line, nil), outdated: true }
+ { position: new_position(ac_diff, a_line, nil, position.line_range), outdated: true }
end
end
diff --git a/lib/gitlab/email/handler/service_desk_handler.rb b/lib/gitlab/email/handler/service_desk_handler.rb
index 74c8d0a1fd7..8d73aa842be 100644
--- a/lib/gitlab/email/handler/service_desk_handler.rb
+++ b/lib/gitlab/email/handler/service_desk_handler.rb
@@ -15,16 +15,14 @@ module Gitlab
PROJECT_KEY_PATTERN = /\A(?<slug>.+)-(?<key>[a-z0-9_]+)\z/.freeze
def initialize(mail, mail_key, service_desk_key: nil)
- super(mail, mail_key)
-
- if service_desk_key.present?
+ if service_desk_key
+ mail_key ||= service_desk_key
@service_desk_key = service_desk_key
- elsif !mail_key&.include?('/') && (matched = HANDLER_REGEX.match(mail_key.to_s))
- @project_slug = matched[:project_slug]
- @project_id = matched[:project_id]&.to_i
- elsif matched = HANDLER_REGEX_LEGACY.match(mail_key.to_s)
- @project_path = matched[:project_path]
end
+
+ super(mail, mail_key)
+
+ match_project_slug || match_legacy_project_slug
end
def can_handle?
@@ -42,15 +40,29 @@ module Gitlab
end
end
+ def match_project_slug
+ return if mail_key&.include?('/')
+ return unless matched = HANDLER_REGEX.match(mail_key.to_s)
+
+ @project_slug = matched[:project_slug]
+ @project_id = matched[:project_id]&.to_i
+ end
+
+ def match_legacy_project_slug
+ return unless matched = HANDLER_REGEX_LEGACY.match(mail_key.to_s)
+
+ @project_path = matched[:project_path]
+ end
+
def metrics_event
:receive_email_service_desk
end
def project
strong_memoize(:project) do
- @project = service_desk_key ? project_from_key : super
- @project = nil unless @project&.service_desk_enabled?
- @project
+ project_record = super
+ project_record ||= project_from_key if service_desk_key
+ project_record&.service_desk_enabled? ? project_record : nil
end
end
@@ -96,7 +108,7 @@ module Gitlab
end
def message_including_template
- description = message_including_reply
+ description = process_message(trim_reply: false, allow_only_quotes: true)
template_content = service_desk_setting&.issue_template_content
if template_content.present?
diff --git a/lib/gitlab/email/message/in_product_marketing.rb b/lib/gitlab/email/message/in_product_marketing.rb
index fb4315e74b2..ac9585bcd1a 100644
--- a/lib/gitlab/email/message/in_product_marketing.rb
+++ b/lib/gitlab/email/message/in_product_marketing.rb
@@ -7,7 +7,8 @@ module Gitlab
UnknownTrackError = Class.new(StandardError)
def self.for(track)
- raise UnknownTrackError unless Namespaces::InProductMarketingEmailsService::TRACKS.key?(track)
+ valid_tracks = [Namespaces::InviteTeamEmailService::TRACK, Namespaces::InProductMarketingEmailsService::TRACKS.keys].flatten
+ raise UnknownTrackError unless valid_tracks.include?(track)
"Gitlab::Email::Message::InProductMarketing::#{track.to_s.classify}".constantize
end
diff --git a/lib/gitlab/email/message/in_product_marketing/admin_verify.rb b/lib/gitlab/email/message/in_product_marketing/admin_verify.rb
index 234b93594b5..19d9cf99cdb 100644
--- a/lib/gitlab/email/message/in_product_marketing/admin_verify.rb
+++ b/lib/gitlab/email/message/in_product_marketing/admin_verify.rb
@@ -36,6 +36,10 @@ module Gitlab
def progress
super(track_name: 'Admin')
end
+
+ def invite_members?
+ invite_members_for_task_experiment_enabled?
+ end
end
end
end
diff --git a/lib/gitlab/email/message/in_product_marketing/base.rb b/lib/gitlab/email/message/in_product_marketing/base.rb
index c4895d35a14..7cd54390b9f 100644
--- a/lib/gitlab/email/message/in_product_marketing/base.rb
+++ b/lib/gitlab/email/message/in_product_marketing/base.rb
@@ -7,16 +7,17 @@ module Gitlab
class Base
include Gitlab::Email::Message::InProductMarketing::Helper
include Gitlab::Routing
+ include Gitlab::Experiment::Dsl
attr_accessor :format
def initialize(group:, user:, series:, format: :html)
- raise ArgumentError, "Only #{total_series} series available for this track." unless series.between?(0, total_series - 1)
-
+ @series = series
@group = group
@user = user
- @series = series
@format = format
+
+ validate_series!
end
def subject_line
@@ -56,6 +57,18 @@ module Gitlab
end
end
+ def invite_members?
+ false
+ end
+
+ def invite_text
+ s_('InProductMarketing|Do you have a teammate who would be perfect for this task?')
+ end
+
+ def invite_link
+ action_link(s_('InProductMarketing|Invite them to help out.'), group_url(group, open_modal: 'invite_members_for_task'))
+ end
+
def unsubscribe
parts = Gitlab.com? ? unsubscribe_com : unsubscribe_self_managed(track, series)
@@ -102,6 +115,10 @@ module Gitlab
["mailers/in_product_marketing", "#{track}-#{series}.png"].join('/')
end
+ def series?
+ total_series > 0
+ end
+
protected
attr_reader :group, :user, :series
@@ -148,6 +165,20 @@ module Gitlab
link(s_('InProductMarketing|update your preferences'), preference_link)
end
+
+ def invite_members_for_task_experiment_enabled?
+ return unless user.can?(:admin_group_member, group)
+
+ experiment(:invite_members_for_task, namespace: group) do |e|
+ e.candidate { true }
+ e.record!
+ e.run
+ end
+ end
+
+ def validate_series!
+ raise ArgumentError, "Only #{total_series} series available for this track." unless @series.between?(0, total_series - 1)
+ end
end
end
end
diff --git a/lib/gitlab/email/message/in_product_marketing/create.rb b/lib/gitlab/email/message/in_product_marketing/create.rb
index 4b0c4af4911..2c396775374 100644
--- a/lib/gitlab/email/message/in_product_marketing/create.rb
+++ b/lib/gitlab/email/message/in_product_marketing/create.rb
@@ -61,6 +61,10 @@ module Gitlab
][series]
end
+ def invite_members?
+ invite_members_for_task_experiment_enabled?
+ end
+
private
def project_link
diff --git a/lib/gitlab/email/message/in_product_marketing/experience.rb b/lib/gitlab/email/message/in_product_marketing/experience.rb
index 4156a737517..7520de6d2a3 100644
--- a/lib/gitlab/email/message/in_product_marketing/experience.rb
+++ b/lib/gitlab/email/message/in_product_marketing/experience.rb
@@ -43,7 +43,9 @@ module Gitlab
survey_id: EASE_SCORE_SURVEY_ID
}
- "#{Gitlab::Saas.com_url}/-/survey_responses?#{params.to_query}"
+ params[:show_incentive] = true if show_incentive?
+
+ "#{gitlab_com_root_url}/-/survey_responses?#{params.to_query}"
end
def feedback_ratings(rating)
@@ -70,9 +72,19 @@ module Gitlab
def show_invite_link
strong_memoize(:show_invite_link) do
- group.member_count > 1 && group.max_member_access_for_user(user) >= GroupMember::DEVELOPER && user.preferred_language == 'en'
+ group.max_member_access_for_user(user) >= GroupMember::DEVELOPER && user.preferred_language == 'en'
end
end
+
+ def show_incentive?
+ show_invite_link && group.member_count > 1
+ end
+
+ def gitlab_com_root_url
+ return root_url.chomp('/') if Rails.env.development?
+
+ Gitlab::Saas.com_url
+ end
end
end
end
diff --git a/lib/gitlab/email/message/in_product_marketing/helper.rb b/lib/gitlab/email/message/in_product_marketing/helper.rb
index cec0aad44a6..bffa90ed4ec 100644
--- a/lib/gitlab/email/message/in_product_marketing/helper.rb
+++ b/lib/gitlab/email/message/in_product_marketing/helper.rb
@@ -36,6 +36,15 @@ module Gitlab
"#{text} (#{link})"
end
end
+
+ def action_link(text, link)
+ case format
+ when :html
+ ActionController::Base.helpers.link_to text, link, target: '_blank', rel: 'noopener noreferrer'
+ else
+ [text, link].join(' >> ')
+ end
+ end
end
end
end
diff --git a/lib/gitlab/email/message/in_product_marketing/invite_team.rb b/lib/gitlab/email/message/in_product_marketing/invite_team.rb
new file mode 100644
index 00000000000..e9334b687f4
--- /dev/null
+++ b/lib/gitlab/email/message/in_product_marketing/invite_team.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Email
+ module Message
+ module InProductMarketing
+ class InviteTeam < Base
+ def subject_line
+ s_('InProductMarketing|Invite your teammates to GitLab')
+ end
+
+ def tagline
+ ''
+ end
+
+ def title
+ s_('InProductMarketing|GitLab is better with teammates to help out!')
+ end
+
+ def subtitle
+ ''
+ end
+
+ def body_line1
+ s_('InProductMarketing|Invite your teammates today and build better code together. You can even assign tasks to new teammates such as setting up CI/CD, to help get projects up and running.')
+ end
+
+ def body_line2
+ ''
+ end
+
+ def cta_text
+ s_('InProductMarketing|Invite your teammates to help')
+ end
+
+ def logo_path
+ 'mailers/in_product_marketing/team-0.png'
+ end
+
+ def series?
+ false
+ end
+
+ private
+
+ def validate_series!
+ raise ArgumentError, "Only one email is sent for this track. Value of `series` should be 0." unless @series == 0
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/email/message/in_product_marketing/verify.rb b/lib/gitlab/email/message/in_product_marketing/verify.rb
index e731c65121e..daf0c969f2b 100644
--- a/lib/gitlab/email/message/in_product_marketing/verify.rb
+++ b/lib/gitlab/email/message/in_product_marketing/verify.rb
@@ -65,6 +65,10 @@ module Gitlab
][series]
end
+ def invite_members?
+ invite_members_for_task_experiment_enabled?
+ end
+
private
def ci_link
diff --git a/lib/gitlab/email/receiver.rb b/lib/gitlab/email/receiver.rb
index 242def826be..526f1188065 100644
--- a/lib/gitlab/email/receiver.rb
+++ b/lib/gitlab/email/receiver.rb
@@ -44,6 +44,10 @@ module Gitlab
}
end
+ def mail
+ strong_memoize(:mail) { build_mail }
+ end
+
private
def handler
@@ -54,10 +58,6 @@ module Gitlab
Handler.for(mail, mail_key)
end
- def mail
- strong_memoize(:mail) { build_mail }
- end
-
def build_mail
Mail::Message.new(@raw)
rescue Encoding::UndefinedConversionError,
diff --git a/lib/gitlab/email/reply_parser.rb b/lib/gitlab/email/reply_parser.rb
index 0f0f4800062..d39fa139abb 100644
--- a/lib/gitlab/email/reply_parser.rb
+++ b/lib/gitlab/email/reply_parser.rb
@@ -4,12 +4,13 @@
module Gitlab
module Email
class ReplyParser
- attr_accessor :message
+ attr_accessor :message, :allow_only_quotes
- def initialize(message, trim_reply: true, append_reply: false)
+ def initialize(message, trim_reply: true, append_reply: false, allow_only_quotes: false)
@message = message
@trim_reply = trim_reply
@append_reply = append_reply
+ @allow_only_quotes = allow_only_quotes
end
def execute
@@ -25,7 +26,12 @@ module Gitlab
# NOTE: We currently don't support empty quotes.
# EmailReplyTrimmer allows this as a special case,
# so we detect it manually here.
- return "" if body.lines.all? { |l| l.strip.empty? || l.start_with?('>') }
+ #
+ # If allow_only_quotes is true, a message where all lines start with ">" is allowed.
+ # This could happen if an email has an empty quote, forwarded without any new content.
+ return "" if body.lines.all? do |l|
+ l.strip.empty? || (!allow_only_quotes && l.start_with?('>'))
+ end
encoded_body = body.force_encoding(encoding).encode("UTF-8")
return encoded_body unless @append_reply
diff --git a/lib/gitlab/emoji.rb b/lib/gitlab/emoji.rb
index 2b5f465d3c5..519b1d94bf5 100644
--- a/lib/gitlab/emoji.rb
+++ b/lib/gitlab/emoji.rb
@@ -4,40 +4,15 @@ module Gitlab
module Emoji
extend self
- def emojis
- Gemojione.index.instance_variable_get(:@emoji_by_name)
- end
-
- def emojis_by_moji
- Gemojione.index.instance_variable_get(:@emoji_by_moji)
- end
-
- def emojis_unicodes
- emojis_by_moji.keys
- end
-
- def emojis_names
- emojis.keys
- end
-
- def emojis_aliases
- @emoji_aliases ||= Gitlab::Json.parse(File.read(Rails.root.join('fixtures', 'emojis', 'aliases.json')))
- end
-
- def emoji_filename(name)
- emojis[name]["unicode"]
- end
-
- def emoji_unicode_filename(moji)
- emojis_by_moji[moji]["unicode"]
- end
-
- def emoji_unicode_version(name)
- emoji_unicode_versions_by_name[name]
- end
+ # When updating emoji assets increase the version below
+ # and update the version number in `app/assets/javascripts/emoji/index.js`
+ EMOJI_VERSION = 1
- def normalize_emoji_name(name)
- emojis_aliases[name] || name
+ # Return a Pathname to emoji's current versioned folder
+ #
+ # @return [Pathname] Absolute Path to versioned emojis folder in `public`
+ def emoji_public_absolute_path
+ Rails.root.join("public/-/emojis/#{EMOJI_VERSION}")
end
def emoji_image_tag(name, src)
@@ -54,23 +29,19 @@ module Gitlab
ActionController::Base.helpers.tag(:img, image_options)
end
- def emoji_exists?(name)
- emojis.has_key?(name)
- end
-
# CSS sprite fallback takes precedence over image fallback
- def gl_emoji_tag(name, options = {})
- emoji_name = emojis_aliases[name] || name
- emoji_info = emojis[emoji_name]
- return unless emoji_info
+ # @param [TanukiEmoji::Character] emoji
+ # @param [Hash] options
+ def gl_emoji_tag(emoji, options = {})
+ return unless emoji
data = {
- name: emoji_name,
- unicode_version: emoji_unicode_version(emoji_name)
+ name: emoji.name,
+ unicode_version: emoji.unicode_version
}
- options = { title: emoji_info['description'], data: data }.merge(options)
+ options = { title: emoji.description, data: data }.merge(options)
- ActionController::Base.helpers.content_tag('gl-emoji', emoji_info['moji'], options)
+ ActionController::Base.helpers.content_tag('gl-emoji', emoji.codepoints, options)
end
def custom_emoji_tag(name, image_source)
@@ -82,12 +53,5 @@ module Gitlab
emoji_image_tag(name, image_source).html_safe
end
end
-
- private
-
- def emoji_unicode_versions_by_name
- @emoji_unicode_versions_by_name ||=
- Gitlab::Json.parse(File.read(Rails.root.join('fixtures', 'emojis', 'emoji-unicode-version-map.json')))
- end
end
end
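With the Gemojione index removed, gl_emoji_tag now takes a TanukiEmoji::Character instead of an emoji name, and asset lookups go through the versioned public path. A hedged sketch of the new call shape, assuming the tanuki_emoji gem's find_by_alpha_code lookup (the emoji name and the emojis.json filename are illustrative):

  emoji = TanukiEmoji.find_by_alpha_code('thumbsup')
  Gitlab::Emoji.gl_emoji_tag(emoji) if emoji   # renders the <gl-emoji> tag

  # Versioned asset location used by frontend fallbacks:
  Gitlab::Emoji.emoji_public_absolute_path.join('emojis.json')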
diff --git a/lib/gitlab/etag_caching/router/restful.rb b/lib/gitlab/etag_caching/router/restful.rb
index 408a901f69d..176676bd6ba 100644
--- a/lib/gitlab/etag_caching/router/restful.rb
+++ b/lib/gitlab/etag_caching/router/restful.rb
@@ -23,7 +23,7 @@ module Gitlab
[
%r(#{RESERVED_WORDS_PREFIX}/noteable/issue/\d+/notes\z),
'issue_notes',
- 'issue_tracking'
+ 'team_planning'
],
[
%r(#{RESERVED_WORDS_PREFIX}/noteable/merge_request/\d+/notes\z),
@@ -33,7 +33,7 @@ module Gitlab
[
%r(#{RESERVED_WORDS_PREFIX}/issues/\d+/realtime_changes\z),
'issue_title',
- 'issue_tracking'
+ 'team_planning'
],
[
%r(#{RESERVED_WORDS_PREFIX}/commit/\S+/pipelines\.json\z),
diff --git a/lib/gitlab/git.rb b/lib/gitlab/git.rb
index a1855132b0c..2da30b88d55 100644
--- a/lib/gitlab/git.rb
+++ b/lib/gitlab/git.rb
@@ -17,6 +17,7 @@ module Gitlab
OSError = Class.new(BaseError)
UnknownRef = Class.new(BaseError)
CommandTimedOut = Class.new(CommandError)
+ InvalidPageToken = Class.new(BaseError)
class << self
include Gitlab::EncodingHelper
diff --git a/lib/gitlab/git/blob.rb b/lib/gitlab/git/blob.rb
index f72217dedde..b0d194f309a 100644
--- a/lib/gitlab/git/blob.rb
+++ b/lib/gitlab/git/blob.rb
@@ -24,7 +24,7 @@ module Gitlab
LFS_POINTER_MIN_SIZE = 120.bytes
LFS_POINTER_MAX_SIZE = 200.bytes
- attr_accessor :size, :mode, :id, :commit_id, :loaded_size, :binary
+ attr_accessor :size, :mode, :id, :commit_id, :loaded_size, :binary, :transformed_for_diff
attr_writer :name, :path, :data
def self.gitlab_blob_truncated_true
@@ -127,6 +127,7 @@ module Gitlab
# Retain the actual size before it is encoded
@loaded_size = @data.bytesize if @data
@loaded_all_data = @loaded_size == size
+ @transformed_for_diff = false
record_metric_blob_size
record_metric_truncated(truncated?)
diff --git a/lib/gitlab/git/commit.rb b/lib/gitlab/git/commit.rb
index 6605e896ef1..267107e04e6 100644
--- a/lib/gitlab/git/commit.rb
+++ b/lib/gitlab/git/commit.rb
@@ -315,10 +315,18 @@ module Gitlab
#
def ref_names(repo)
refs(repo).map do |ref|
- ref.sub(%r{^refs/(heads|remotes|tags)/}, "")
+ strip_ref_prefix(ref)
end
end
+ def first_ref_by_oid(repo)
+ ref = repo.refs_by_oid(oid: id, limit: 1)&.first
+
+ return unless ref
+
+ strip_ref_prefix(ref)
+ end
+
def message
encode! @message
end
@@ -466,6 +474,10 @@ module Gitlab
commit_id.match?(/\s/)
)
end
+
+ def strip_ref_prefix(ref)
+ ref.sub(%r{^refs/(heads|remotes|tags)/}, "")
+ end
end
end
end
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 473bc04661c..5afdcc0bd4c 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -20,6 +20,7 @@ module Gitlab
EMPTY_REPOSITORY_CHECKSUM = '0000000000000000000000000000000000000000'
NoRepository = Class.new(::Gitlab::Git::BaseError)
+ RepositoryExists = Class.new(::Gitlab::Git::BaseError)
InvalidRepository = Class.new(::Gitlab::Git::BaseError)
InvalidBlobName = Class.new(::Gitlab::Git::BaseError)
InvalidRef = Class.new(::Gitlab::Git::BaseError)
@@ -101,6 +102,8 @@ module Gitlab
def create_repository
wrapped_gitaly_errors do
gitaly_repository_client.create_repository
+ rescue GRPC::AlreadyExists => e
+ raise RepositoryExists, e.message
end
end
@@ -198,9 +201,9 @@ module Gitlab
# Returns an Array of Tags
#
- def tags(sort_by: nil)
+ def tags(sort_by: nil, pagination_params: nil)
wrapped_gitaly_errors do
- gitaly_ref_client.tags(sort_by: sort_by)
+ gitaly_ref_client.tags(sort_by: sort_by, pagination_params: pagination_params)
end
end
@@ -519,6 +522,17 @@ module Gitlab
@refs_hash
end
+ # Returns matching refs for OID
+ #
+ # Limit of 0 means there is no limit.
+ def refs_by_oid(oid:, limit: 0)
+ wrapped_gitaly_errors do
+ gitaly_ref_client.find_refs_by_oid(oid: oid, limit: limit)
+ end
+ rescue CommandError, TypeError, NoRepository
+ nil
+ end
+
# Returns url for submodule
#
# Ex.
@@ -784,6 +798,12 @@ module Gitlab
end
end
+ def list_refs
+ wrapped_gitaly_errors do
+ gitaly_ref_client.list_refs
+ end
+ end
+
# Refactoring aid; allows us to copy code from app/models/repository.rb
def commit(ref = 'HEAD')
Gitlab::Git::Commit.find(self, ref)
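Gitlab::Git::Repository now supports Gitaly-side tag pagination and OID-based ref lookups. A sketch against the interface shown above; the argument values are illustrative:

  # Page through tags; pagination_params is forwarded to Gitaly's
  # FindAllTagsRequest.
  repository.tags(sort_by: 'name_asc', pagination_params: { limit: 20 })

  # First branch or tag pointing at a commit; returns nil on Gitaly errors.
  repository.refs_by_oid(oid: commit_sha, limit: 1)

  # All branch refs as raw Gitaly reference messages.
  repository.list_refs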
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index 2c26da037da..cc3f20ab774 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -24,7 +24,6 @@ module Gitlab
end
end
- PEM_REGEX = /\-+BEGIN CERTIFICATE\-+.+?\-+END CERTIFICATE\-+/m.freeze
SERVER_VERSION_FILE = 'GITALY_SERVER_VERSION'
MAXIMUM_GITALY_CALLS = 30
CLIENT_NAME = (Gitlab::Runtime.sidekiq? ? 'gitlab-sidekiq' : 'gitlab-web').freeze
@@ -57,33 +56,15 @@ module Gitlab
# https://gitlab.com/gitlab-org/gitaly/-/blob/bf9f52bc/client/dial.go#L78
{
'grpc.keepalive_time_ms': 20000,
- 'grpc.keepalive_permit_without_calls': 1
+ 'grpc.keepalive_permit_without_calls': 1,
+ 'grpc.http2.max_pings_without_data': 0
}
end
private_class_method :channel_args
- def self.stub_cert_paths
- cert_paths = Dir["#{OpenSSL::X509::DEFAULT_CERT_DIR}/*"]
- cert_paths << OpenSSL::X509::DEFAULT_CERT_FILE if File.exist? OpenSSL::X509::DEFAULT_CERT_FILE
- cert_paths
- end
-
- def self.stub_certs
- return @certs if @certs
-
- @certs = stub_cert_paths.flat_map do |cert_file|
- File.read(cert_file).scan(PEM_REGEX).map do |cert|
- OpenSSL::X509::Certificate.new(cert).to_pem
- rescue OpenSSL::OpenSSLError => e
- Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e, cert_file: cert_file)
- nil
- end.compact
- end.uniq.join("\n")
- end
-
def self.stub_creds(storage)
if URI(address(storage)).scheme == 'tls'
- GRPC::Core::ChannelCredentials.new stub_certs
+ GRPC::Core::ChannelCredentials.new ::Gitlab::X509::Certificate.ca_certs_bundle
else
:this_channel_is_insecure
end
diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb
index 75588ad980c..7c688044e9c 100644
--- a/lib/gitlab/gitaly_client/commit_service.rb
+++ b/lib/gitlab/gitaly_client/commit_service.rb
@@ -205,6 +205,8 @@ module Gitlab
end
def between(from, to)
+ return list_commits(["^" + from, to], reverse: true) if Feature.enabled?(:between_commits_via_list_commits)
+
request = Gitaly::CommitsBetweenRequest.new(
repository: @gitaly_repo,
from: from,
diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb
index 235eef4575e..c064811b1e7 100644
--- a/lib/gitlab/gitaly_client/ref_service.rb
+++ b/lib/gitlab/gitaly_client/ref_service.rb
@@ -77,8 +77,8 @@ module Gitlab
consume_find_local_branches_response(response)
end
- def tags(sort_by: nil)
- request = Gitaly::FindAllTagsRequest.new(repository: @gitaly_repo)
+ def tags(sort_by: nil, pagination_params: nil)
+ request = Gitaly::FindAllTagsRequest.new(repository: @gitaly_repo, pagination_params: pagination_params)
request.sort_by = sort_tags_by_param(sort_by) if sort_by
response = GitalyClient.call(@storage, :ref_service, :find_all_tags, request, timeout: GitalyClient.medium_timeout)
@@ -194,18 +194,39 @@ module Gitlab
raise ArgumentError, ex
end
+ def list_refs(patterns = [Gitlab::Git::BRANCH_REF_PREFIX])
+ request = Gitaly::ListRefsRequest.new(
+ repository: @gitaly_repo,
+ patterns: patterns
+ )
+
+ response = GitalyClient.call(@storage, :ref_service, :list_refs, request, timeout: GitalyClient.fast_timeout)
+ consume_list_refs_response(response)
+ end
+
def pack_refs
request = Gitaly::PackRefsRequest.new(repository: @gitaly_repo)
GitalyClient.call(@storage, :ref_service, :pack_refs, request, timeout: GitalyClient.long_timeout)
end
+ def find_refs_by_oid(oid:, limit:)
+ request = Gitaly::FindRefsByOIDRequest.new(repository: @gitaly_repo, sort_field: :refname, oid: oid, limit: limit)
+
+ response = GitalyClient.call(@storage, :ref_service, :find_refs_by_oid, request, timeout: GitalyClient.medium_timeout)
+ response&.refs&.to_a
+ end
+
private
def consume_refs_response(response)
response.flat_map { |message| message.names.map { |name| yield(name) } }
end
+ def consume_list_refs_response(response)
+ response.flat_map(&:references)
+ end
+
def sort_local_branches_by_param(sort_by)
sort_by = 'name' if sort_by == 'name_asc'
diff --git a/lib/gitlab/github_import/bulk_importing.rb b/lib/gitlab/github_import/bulk_importing.rb
index 80f8f8bfbe2..28a39128ec9 100644
--- a/lib/gitlab/github_import/bulk_importing.rb
+++ b/lib/gitlab/github_import/bulk_importing.rb
@@ -30,7 +30,7 @@ module Gitlab
# Bulk inserts the given rows into the database.
def bulk_insert(model, rows, batch_size: 100)
rows.each_slice(batch_size) do |slice|
- Gitlab::Database.main.bulk_insert(model.table_name, slice) # rubocop:disable Gitlab/BulkInsert
+ ApplicationRecord.legacy_bulk_insert(model.table_name, slice) # rubocop:disable Gitlab/BulkInsert
log_and_increment_counter(slice.size, :imported)
end
diff --git a/lib/gitlab/github_import/importer/diff_note_importer.rb b/lib/gitlab/github_import/importer/diff_note_importer.rb
index 4cfc920e2e3..0aa0896aa57 100644
--- a/lib/gitlab/github_import/importer/diff_note_importer.rb
+++ b/lib/gitlab/github_import/importer/diff_note_importer.rb
@@ -4,41 +4,64 @@ module Gitlab
module GithubImport
module Importer
class DiffNoteImporter
- attr_reader :note, :project, :client, :user_finder
-
- # note - An instance of `Gitlab::GithubImport::Representation::DiffNote`.
- # project - An instance of `Project`.
- # client - An instance of `Gitlab::GithubImport::Client`.
+ # note - An instance of `Gitlab::GithubImport::Representation::DiffNote`
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
def initialize(note, project, client)
@note = note
@project = project
@client = client
- @user_finder = GithubImport::UserFinder.new(project, client)
end
def execute
- return unless (mr_id = find_merge_request_id)
+ return if merge_request_id.blank?
- author_id, author_found = user_finder.author_id_for(note)
+ note.project = project
+ note.merge_request = merge_request
- note_body = MarkdownText.format(note.note, note.author, author_found)
+ build_author_attributes
- attributes = {
- discussion_id: Discussion.discussion_id(note),
- noteable_type: 'MergeRequest',
- noteable_id: mr_id,
- project_id: project.id,
- author_id: author_id,
- note: note_body,
- system: false,
- commit_id: note.original_commit_id,
- line_code: note.line_code,
- type: 'LegacyDiffNote',
- created_at: note.created_at,
- updated_at: note.updated_at,
- st_diff: note.diff_hash.to_yaml
- }
+ # Diff notes with suggestions are imported as DiffNote records, which
+ # is slower than LegacyDiffNote because DiffNote cannot use the
+ # BulkImporting strategy, which skips callbacks and validations.
+ # For this reason, notes that don't have suggestions are still
+ # imported with LegacyDiffNote.
+ if import_with_diff_note?
+ import_with_diff_note
+ else
+ import_with_legacy_diff_note
+ end
+ rescue ActiveRecord::InvalidForeignKey => e
+ # It's possible the project and the issue have been deleted since
+ # scheduling this job. In this case we'll just skip creating the note.
+ Logger.info(
+ message: e.message,
+ github_identifiers: note.github_identifiers
+ )
+ end
+ private
+
+ attr_reader :note, :project, :client, :author_id, :author_found
+
+ def import_with_diff_note?
+ note.contains_suggestion? && use_diff_note_with_suggestions_enabled?
+ end
+
+ def use_diff_note_with_suggestions_enabled?
+ Feature.enabled?(
+ :github_importer_use_diff_note_with_suggestions,
+ default_enabled: :yaml
+ )
+ end
+
+ def build_author_attributes
+ @author_id, @author_found = user_finder.author_id_for(note)
+ end
+
+ # rubocop:disable Gitlab/BulkInsert
+ def import_with_legacy_diff_note
+ log_diff_note_creation('LegacyDiffNote')
# It's possible that during an import we'll insert tens of thousands
# of diff notes. If we were to use the Note/LegacyDiffNote model here
# we'd also have to run additional queries for both validations and
@@ -47,15 +70,70 @@ module Gitlab
# To work around this we're using bulk_insert with a single row. This
# allows us to efficiently insert data (even if it's just 1 row)
# without having to use all sorts of hacks to disable callbacks.
- Gitlab::Database.main.bulk_insert(LegacyDiffNote.table_name, [attributes]) # rubocop:disable Gitlab/BulkInsert
- rescue ActiveRecord::InvalidForeignKey
- # It's possible the project and the issue have been deleted since
- # scheduling this job. In this case we'll just skip creating the note.
+ ApplicationRecord.legacy_bulk_insert(LegacyDiffNote.table_name, [{
+ noteable_type: note.noteable_type,
+ system: false,
+ type: 'LegacyDiffNote',
+ discussion_id: note.discussion_id,
+ noteable_id: merge_request_id,
+ project_id: project.id,
+ author_id: author_id,
+ note: note_body,
+ commit_id: note.original_commit_id,
+ line_code: note.line_code,
+ created_at: note.created_at,
+ updated_at: note.updated_at,
+ st_diff: note.diff_hash.to_yaml
+ }])
+ end
+ # rubocop:enable Gitlab/BulkInsert
+
+ def import_with_diff_note
+ log_diff_note_creation('DiffNote')
+
+ ::Import::Github::Notes::CreateService.new(project, author, {
+ noteable_type: note.noteable_type,
+ system: false,
+ type: 'DiffNote',
+ noteable_id: merge_request_id,
+ project_id: project.id,
+ note: note_body,
+ discussion_id: note.discussion_id,
+ commit_id: note.original_commit_id,
+ created_at: note.created_at,
+ updated_at: note.updated_at,
+ position: note.diff_position
+ }).execute
+ end
+
+ def note_body
+ @note_body ||= MarkdownText.format(note.note, note.author, author_found)
+ end
+
+ def author
+ @author ||= User.find(author_id)
+ end
+
+ def merge_request
+ @merge_request ||= MergeRequest.find(merge_request_id)
end
# Returns the ID of the merge request this note belongs to.
- def find_merge_request_id
- GithubImport::IssuableFinder.new(project, note).database_id
+ def merge_request_id
+ @merge_request_id ||= GithubImport::IssuableFinder.new(project, note).database_id
+ end
+
+ def user_finder
+ @user_finder ||= GithubImport::UserFinder.new(project, client)
+ end
+
+ def log_diff_note_creation(model)
+ Logger.info(
+ project_id: project.id,
+ importer: self.class.name,
+ github_identifiers: note.github_identifiers,
+ model: model
+ )
end
end
end
diff --git a/lib/gitlab/github_import/importer/issue_importer.rb b/lib/gitlab/github_import/importer/issue_importer.rb
index f8665676ccf..7f46615f17e 100644
--- a/lib/gitlab/github_import/importer/issue_importer.rb
+++ b/lib/gitlab/github_import/importer/issue_importer.rb
@@ -75,7 +75,7 @@ module Gitlab
end
end
- Gitlab::Database.main.bulk_insert(IssueAssignee.table_name, assignees) # rubocop:disable Gitlab/BulkInsert
+ ApplicationRecord.legacy_bulk_insert(IssueAssignee.table_name, assignees) # rubocop:disable Gitlab/BulkInsert
end
end
end
diff --git a/lib/gitlab/github_import/importer/label_links_importer.rb b/lib/gitlab/github_import/importer/label_links_importer.rb
index b608bb48e38..5e248c7cfc5 100644
--- a/lib/gitlab/github_import/importer/label_links_importer.rb
+++ b/lib/gitlab/github_import/importer/label_links_importer.rb
@@ -40,7 +40,7 @@ module Gitlab
}
end
- Gitlab::Database.main.bulk_insert(LabelLink.table_name, rows) # rubocop:disable Gitlab/BulkInsert
+ ApplicationRecord.legacy_bulk_insert(LabelLink.table_name, rows) # rubocop:disable Gitlab/BulkInsert
end
def find_target_id
diff --git a/lib/gitlab/github_import/importer/note_importer.rb b/lib/gitlab/github_import/importer/note_importer.rb
index 1fd42a69fac..2cc3a82dd9b 100644
--- a/lib/gitlab/github_import/importer/note_importer.rb
+++ b/lib/gitlab/github_import/importer/note_importer.rb
@@ -37,7 +37,7 @@ module Gitlab
# We're using bulk_insert here so we can bypass any validations and
# callbacks. Running these would result in a lot of unnecessary SQL
# queries being executed when importing large projects.
- Gitlab::Database.main.bulk_insert(Note.table_name, [attributes]) # rubocop:disable Gitlab/BulkInsert
+ ApplicationRecord.legacy_bulk_insert(Note.table_name, [attributes]) # rubocop:disable Gitlab/BulkInsert
rescue ActiveRecord::InvalidForeignKey
# It's possible the project and the issue have been deleted since
# scheduling this job. In this case we'll just skip creating the note.
diff --git a/lib/gitlab/github_import/importer/pull_requests_merged_by_importer.rb b/lib/gitlab/github_import/importer/pull_requests_merged_by_importer.rb
index 287e0ea7f7f..c56b391cbec 100644
--- a/lib/gitlab/github_import/importer/pull_requests_merged_by_importer.rb
+++ b/lib/gitlab/github_import/importer/pull_requests_merged_by_importer.rb
@@ -31,9 +31,7 @@ module Gitlab
end
def each_object_to_import
- project.merge_requests.with_state(:merged).find_each do |merge_request|
- next if already_imported?(merge_request)
-
+ merge_requests_to_import.find_each do |merge_request|
Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched)
pull_request = client.pull_request(project.import_source, merge_request.iid)
@@ -42,6 +40,17 @@ module Gitlab
mark_as_imported(merge_request)
end
end
+
+ private
+
+ # Returns only the merge requests that still have merged_by to be imported.
+ def merge_requests_to_import
+ project.merge_requests.id_not_in(already_imported_objects).with_state(:merged)
+ end
+
+ def already_imported_objects
+ Gitlab::Cache::Import::Caching.values_from_set(already_imported_cache_key)
+ end
end
end
end
diff --git a/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb b/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb
index bd65eb5899c..5e55d09fe3d 100644
--- a/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb
+++ b/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb
@@ -86,7 +86,7 @@ module Gitlab
# Returns only the merge requests that still have reviews to be imported.
def merge_requests_to_import
- project.merge_requests.where.not(id: already_imported_merge_requests) # rubocop: disable CodeReuse/ActiveRecord
+ project.merge_requests.id_not_in(already_imported_merge_requests)
end
def already_imported_merge_requests
diff --git a/lib/gitlab/github_import/representation/diff_note.rb b/lib/gitlab/github_import/representation/diff_note.rb
index a3dcd2e380c..fecff0644c2 100644
--- a/lib/gitlab/github_import/representation/diff_note.rb
+++ b/lib/gitlab/github_import/representation/diff_note.rb
@@ -7,13 +7,14 @@ module Gitlab
include ToHash
include ExposeAttribute
- attr_reader :attributes
-
- expose_attribute :noteable_type, :noteable_id, :commit_id, :file_path,
- :diff_hunk, :author, :note, :created_at, :updated_at,
- :original_commit_id, :note_id
-
+ NOTEABLE_TYPE = 'MergeRequest'
NOTEABLE_ID_REGEX = %r{/pull/(?<iid>\d+)}i.freeze
+ DISCUSSION_CACHE_KEY = 'github-importer/discussion-id-map/%{project_id}/%{noteable_id}/%{original_note_id}'
+
+ expose_attribute :noteable_id, :commit_id, :file_path,
+ :diff_hunk, :author, :created_at, :updated_at,
+ :original_commit_id, :note_id, :end_line, :start_line,
+ :side, :in_reply_to_id
# Builds a diff note from a GitHub API response.
#
@@ -30,7 +31,6 @@ module Gitlab
user = Representation::User.from_api_response(note.user) if note.user
hash = {
- noteable_type: 'MergeRequest',
noteable_id: matches[:iid].to_i,
file_path: note.path,
commit_id: note.commit_id,
@@ -42,7 +42,9 @@ module Gitlab
updated_at: note.updated_at,
note_id: note.id,
end_line: note.line,
- start_line: note.start_line
+ start_line: note.start_line,
+ side: note.side,
+ in_reply_to_id: note.in_reply_to_id
}
new(hash)
@@ -56,21 +58,41 @@ module Gitlab
new(hash)
end
+ attr_accessor :merge_request, :project
+
# attributes - A Hash containing the raw note details. The keys of this
# Hash must be Symbols.
def initialize(attributes)
@attributes = attributes
+
+ @note_formatter = DiffNotes::SuggestionFormatter.new(
+ note: attributes[:note],
+ start_line: attributes[:start_line],
+ end_line: attributes[:end_line]
+ )
+ end
+
+ def noteable_type
+ NOTEABLE_TYPE
+ end
+
+ def contains_suggestion?
+ @note_formatter.contains_suggestion?
+ end
+
+ def note
+ @note_formatter.formatted_note
end
def line_code
diff_line = Gitlab::Diff::Parser.new.parse(diff_hunk.lines).to_a.last
- Gitlab::Git
- .diff_line_code(file_path, diff_line.new_pos, diff_line.old_pos)
+ Gitlab::Git.diff_line_code(file_path, diff_line.new_pos, diff_line.old_pos)
end
# Returns a Hash that can be used to populate `notes.st_diff`, removing
# the need for requesting Git data for every diff note.
+ # Used when importing with LegacyDiffNote
def diff_hash
{
diff: diff_hunk,
@@ -85,12 +107,15 @@ module Gitlab
}
end
- def note
- @note ||= DiffNotes::SuggestionFormatter.formatted_note_for(
- note: attributes[:note],
- start_line: attributes[:start_line],
- end_line: attributes[:end_line]
- )
+ # Used when importing with DiffNote
+ def diff_position
+ position_params = {
+ diff_refs: merge_request.diff_refs,
+ old_path: file_path,
+ new_path: file_path
+ }
+
+ Gitlab::Diff::Position.new(position_params.merge(diff_line_params))
end
def github_identifiers
@@ -100,6 +125,53 @@ module Gitlab
noteable_type: noteable_type
}
end
+
+ def discussion_id
+ if in_reply_to_id.present?
+ current_discussion_id
+ else
+ Discussion.discussion_id(
+ Struct
+ .new(:noteable_id, :noteable_type)
+ .new(merge_request.id, NOTEABLE_TYPE)
+ ).tap do |discussion_id|
+ cache_discussion_id(discussion_id)
+ end
+ end
+ end
+
+ private
+
+ # Required by ExposeAttribute
+ attr_reader :attributes
+
+ def diff_line_params
+ if addition?
+ { new_line: end_line, old_line: nil }
+ else
+ { new_line: nil, old_line: end_line }
+ end
+ end
+
+ def addition?
+ side == 'RIGHT'
+ end
+
+ def cache_discussion_id(discussion_id)
+ Gitlab::Cache::Import::Caching.write(discussion_id_cache_key(note_id), discussion_id)
+ end
+
+ def current_discussion_id
+ Gitlab::Cache::Import::Caching.read(discussion_id_cache_key(in_reply_to_id))
+ end
+
+ def discussion_id_cache_key(id)
+ DISCUSSION_CACHE_KEY % {
+ project_id: project.id,
+ noteable_id: merge_request.id,
+ original_note_id: id
+ }
+ end
end
end
end
diff --git a/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter.rb b/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter.rb
index 4e5855ee4cd..38b15c4b5bb 100644
--- a/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter.rb
+++ b/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter.rb
@@ -10,30 +10,38 @@ module Gitlab
module Representation
module DiffNotes
class SuggestionFormatter
+ include Gitlab::Utils::StrongMemoize
+
# A github suggestion:
# - the ```suggestion tag must be the first text of the line
# - it might have up to 3 spaces before the ```suggestion tag
# - extra text on the ```suggestion tag line will be ignored
GITHUB_SUGGESTION = /^\ {,3}(?<suggestion>```suggestion\b).*(?<eol>\R)/.freeze
- def self.formatted_note_for(...)
- new(...).formatted_note
- end
-
def initialize(note:, start_line: nil, end_line: nil)
@note = note
@start_line = start_line
@end_line = end_line
end
+ # Returns the note with any GitHub suggestion blocks reformatted
+ # into GitLab suggestion syntax
def formatted_note
- if contains_suggestion?
- note.gsub(
- GITHUB_SUGGESTION,
- "\\k<suggestion>:#{suggestion_range}\\k<eol>"
- )
- else
- note
+ @formatted_note ||=
+ if contains_suggestion?
+ note.gsub(
+ GITHUB_SUGGESTION,
+ "\\k<suggestion>:#{suggestion_range}\\k<eol>"
+ )
+ else
+ note
+ end
+ end
+
+ def contains_suggestion?
+ strong_memoize(:contains_suggestion) do
+ note.to_s.match?(GITHUB_SUGGESTION)
end
end
@@ -41,10 +49,6 @@ module Gitlab
attr_reader :note, :start_line, :end_line
- def contains_suggestion?
- note.to_s.match?(GITHUB_SUGGESTION)
- end
-
# Github always saves the comment on the _last_ line of the range.
# Therefore, the diff hunk will always be related to lines before
# the comment itself.
diff --git a/lib/gitlab/gon_helper.rb b/lib/gitlab/gon_helper.rb
index 9f628a10771..9ad902efb3a 100644
--- a/lib/gitlab/gon_helper.rb
+++ b/lib/gitlab/gon_helper.rb
@@ -39,6 +39,7 @@ module Gitlab
gon.ee = Gitlab.ee?
gon.jh = Gitlab.jh?
gon.dot_com = Gitlab.com?
+ gon.dev_env_or_com = Gitlab.dev_env_or_com?
if current_user
gon.current_user_id = current_user.id
@@ -55,7 +56,8 @@ module Gitlab
push_frontend_feature_flag(:security_auto_fix, default_enabled: false)
push_frontend_feature_flag(:improved_emoji_picker, default_enabled: :yaml)
push_frontend_feature_flag(:new_header_search, default_enabled: :yaml)
- push_frontend_feature_flag(:suppress_apollo_errors_during_navigation, current_user, default_enabled: :yaml)
+ push_frontend_feature_flag(:configure_iac_scanning_via_mr, current_user, default_enabled: :yaml)
+ push_frontend_feature_flag(:bootstrap_confirmation_modals, default_enabled: :yaml)
end
# Exposes the state of a feature flag to the frontend code.
diff --git a/lib/gitlab/gpg/commit.rb b/lib/gitlab/gpg/commit.rb
index 1abbd6dc45b..9a6317e2b76 100644
--- a/lib/gitlab/gpg/commit.rb
+++ b/lib/gitlab/gpg/commit.rb
@@ -48,7 +48,7 @@ module Gitlab
if gpg_key
Gitlab::Gpg::CurrentKeyChain.add(gpg_key.key)
- clear_memoization(:verified_signature)
+ clear_memoization(:gpg_signatures)
end
yield gpg_key
@@ -56,16 +56,7 @@ module Gitlab
end
def verified_signature
- strong_memoize(:verified_signature) { gpgme_signature }
- end
-
- def gpgme_signature
- GPGME::Crypto.new.verify(signature_text, signed_text: signed_text) do |verified_signature|
- # Return the first signature for now: https://gitlab.com/gitlab-org/gitlab-foss/issues/54932
- break verified_signature
- end
- rescue GPGME::Error
- nil
+ gpg_signatures.first
end
def create_cached_signature!
@@ -77,6 +68,24 @@ module Gitlab
end
end
+ def gpg_signatures
+ strong_memoize(:gpg_signatures) do
+ signatures = []
+
+ GPGME::Crypto.new.verify(signature_text, signed_text: signed_text) do |verified_signature|
+ signatures << verified_signature
+ end
+
+ signatures
+ rescue GPGME::Error
+ []
+ end
+ end
+
+ def multiple_signatures?
+ gpg_signatures.size > 1
+ end
+
def attributes(gpg_key)
user_infos = user_infos(gpg_key)
verification_status = verification_status(gpg_key)
@@ -93,6 +102,7 @@ module Gitlab
end
def verification_status(gpg_key)
+ return :multiple_signatures if multiple_signatures? && Feature.enabled?(:multiple_gpg_signatures, @commit.project, default_enabled: :yaml)
return :unknown_key unless gpg_key
return :unverified_key unless gpg_key.verified?
return :unverified unless verified_signature&.valid?
diff --git a/lib/gitlab/grape_logging/loggers/urgency_logger.rb b/lib/gitlab/grape_logging/loggers/urgency_logger.rb
new file mode 100644
index 00000000000..0a503086d05
--- /dev/null
+++ b/lib/gitlab/grape_logging/loggers/urgency_logger.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GrapeLogging
+ module Loggers
+ class UrgencyLogger < ::GrapeLogging::Loggers::Base
+ def parameters(request, _)
+ endpoint = request.env['api.endpoint']
+ return {} unless endpoint
+
+ urgency = endpoint.options[:for].try(:urgency_for_app, endpoint)
+ return {} unless urgency
+
+ { request_urgency: urgency.name, target_duration_s: urgency.duration }
+ end
+ end
+ end
+ end
+end
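UrgencyLogger only reads metadata that Grape already exposes on the request env, so it can be appended to the grape_logging logger list like any other parameter logger. A rough wiring sketch, assuming the API class already mounts GrapeLogging's request logger middleware:

  use GrapeLogging::Middleware::RequestLogger,
      include: [Gitlab::GrapeLogging::Loggers::UrgencyLogger.new]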
diff --git a/lib/gitlab/graphql/known_operations.rb b/lib/gitlab/graphql/known_operations.rb
new file mode 100644
index 00000000000..ead52935945
--- /dev/null
+++ b/lib/gitlab/graphql/known_operations.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ class KnownOperations
+ Operation = Struct.new(:name) do
+ def to_caller_id
+ "graphql:#{name}"
+ end
+
+ def query_urgency
+ # We'll be able to actually correlate query_urgency with https://gitlab.com/gitlab-org/gitlab/-/issues/345141
+ ::Gitlab::EndpointAttributes::DEFAULT_URGENCY
+ end
+ end
+
+ ANONYMOUS = Operation.new("anonymous").freeze
+ UNKNOWN = Operation.new("unknown").freeze
+
+ def self.default
+ @default ||= self.new(Gitlab::Webpack::GraphqlKnownOperations.load)
+ end
+
+ def initialize(operation_names)
+ @operation_hash = operation_names
+ .map { |name| Operation.new(name).freeze }
+ .concat([ANONYMOUS, UNKNOWN])
+ .index_by(&:name)
+ end
+
+ # Returns the known operation from the given ::GraphQL::Query object
+ def from_query(query)
+ operation_name = query.selected_operation_name
+
+ return ANONYMOUS unless operation_name
+
+ @operation_hash[operation_name] || UNKNOWN
+ end
+
+ def operations
+ @operation_hash.values
+ end
+ end
+ end
+end
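KnownOperations is a small lookup table from operation names to caller IDs. A usage sketch with a hand-built instance; the operation names and query variables are illustrative (in production the list comes from Gitlab::Webpack::GraphqlKnownOperations.load):

  known = Gitlab::Graphql::KnownOperations.new(%w[getProject currentUser])

  known.from_query(query).to_caller_id   # => "graphql:getProject" for a matching GraphQL::Query
  known.from_query(unnamed_query)        # => the ANONYMOUS operation
  known.operations.map(&:name)           # => ["getProject", "currentUser", "anonymous", "unknown"]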
diff --git a/lib/gitlab/graphql/loaders/full_path_model_loader.rb b/lib/gitlab/graphql/loaders/full_path_model_loader.rb
index 7f9013c6e4c..2ea3fa71d5e 100644
--- a/lib/gitlab/graphql/loaders/full_path_model_loader.rb
+++ b/lib/gitlab/graphql/loaders/full_path_model_loader.rb
@@ -16,8 +16,11 @@ module Gitlab
def find
BatchLoader::GraphQL.for(full_path).batch(key: model_class) do |full_paths, loader, args|
+ scope = args[:key]
+ # this logic cannot be placed in the NamespaceResolver due to N+1
+ scope = scope.without_project_namespaces if scope == Namespace
# `with_route` avoids an N+1 calculating full_path
- args[:key].where_full_path_in(full_paths).with_route.each do |model_instance|
+ scope.where_full_path_in(full_paths).with_route.each do |model_instance|
loader.call(model_instance.full_path.downcase, model_instance)
end
end
diff --git a/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb b/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb
index 5a9d21e7469..15f95edd318 100644
--- a/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb
+++ b/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb
@@ -76,7 +76,7 @@ module Gitlab
def items
original_items = super
- return original_items if Gitlab::Pagination::Keyset::Order.keyset_aware?(original_items) || Feature.disabled?(:new_graphql_keyset_pagination)
+ return original_items if Feature.disabled?(:new_graphql_keyset_pagination, default_enabled: :yaml) || Gitlab::Pagination::Keyset::Order.keyset_aware?(original_items)
strong_memoize(:generic_keyset_pagination_items) do
rebuilt_items_with_keyset_order, success = Gitlab::Pagination::Keyset::SimpleOrderBuilder.build(original_items)
diff --git a/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb b/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
index b8d2f5b0f29..207324e73bd 100644
--- a/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
+++ b/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
@@ -10,15 +10,10 @@ module Gitlab
ALL_ANALYZERS = [COMPLEXITY_ANALYZER, DEPTH_ANALYZER, FIELD_USAGE_ANALYZER].freeze
def initial_value(query)
- variables = process_variables(query.provided_variables)
- default_initial_values(query).merge({
- operation_name: query.operation_name,
- query_string: query.query_string,
- variables: variables
- })
- rescue StandardError => e
- Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
- default_initial_values(query)
+ {
+ time_started: Gitlab::Metrics::System.monotonic_time,
+ query: query
+ }
end
def call(memo, *)
@@ -28,25 +23,42 @@ module Gitlab
def final_value(memo)
return if memo.nil?
- complexity, depth, field_usages = GraphQL::Analysis.analyze_query(memo[:query], ALL_ANALYZERS)
+ query = memo[:query]
+ complexity, depth, field_usages = GraphQL::Analysis.analyze_query(query, ALL_ANALYZERS)
memo[:depth] = depth
memo[:complexity] = complexity
# This duration is not the execution time of the
# query but the execution time of the analyzer.
- memo[:duration_s] = duration(memo[:time_started]).round(1)
+ memo[:duration_s] = duration(memo[:time_started])
memo[:used_fields] = field_usages.first
memo[:used_deprecated_fields] = field_usages.second
- RequestStore.store[:graphql_logs] ||= []
- RequestStore.store[:graphql_logs] << memo
- GraphqlLogger.info(memo.except!(:time_started, :query))
+ push_to_request_store(memo)
+
+ # This gl_analysis is included in the tracer log
+ query.context[:gl_analysis] = memo.except!(:time_started, :query)
rescue StandardError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
end
private
+ def push_to_request_store(memo)
+ query = memo[:query]
+
+ # TODO: This RequestStore management is used to handle setting request wide metadata
+ # to improve preexisting logging. We should handle this either with ApplicationContext
+ # or in a separate tracer.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/343802
+
+ RequestStore.store[:graphql_logs] ||= []
+ RequestStore.store[:graphql_logs] << memo.except(:time_started, :duration_s, :query).merge({
+ variables: process_variables(query.provided_variables),
+ operation_name: query.operation_name
+ })
+ end
+
def process_variables(variables)
filtered_variables = filter_sensitive_variables(variables)
@@ -66,16 +78,6 @@ module Gitlab
def duration(time_started)
Gitlab::Metrics::System.monotonic_time - time_started
end
-
- def default_initial_values(query)
- {
- time_started: Gitlab::Metrics::System.monotonic_time,
- query_string: nil,
- query: query,
- variables: nil,
- duration_s: nil
- }
- end
end
end
end
diff --git a/lib/gitlab/graphql/tracers/application_context_tracer.rb b/lib/gitlab/graphql/tracers/application_context_tracer.rb
new file mode 100644
index 00000000000..4193c46e321
--- /dev/null
+++ b/lib/gitlab/graphql/tracers/application_context_tracer.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Tracers
+ # This graphql-ruby tracer sets up `ApplicationContext` for certain operations.
+ class ApplicationContextTracer
+ def self.use(schema)
+ schema.tracer(self.new)
+ end
+
+ # See docs on expected interface for trace
+ # https://graphql-ruby.org/api-doc/1.12.17/GraphQL/Tracing
+ def trace(key, data)
+ case key
+ when "execute_query"
+ operation = known_operation(data)
+
+ ::Gitlab::ApplicationContext.with_context(caller_id: operation.to_caller_id) do
+ yield
+ end
+ else
+ yield
+ end
+ end
+
+ private
+
+ def known_operation(data)
+ # The library guarantees that we should have :query for execute_query, but we're being defensive here
+ query = data.fetch(:query, nil)
+
+ return ::Gitlab::Graphql::KnownOperations::UNKNOWN unless query
+
+ ::Gitlab::Graphql::KnownOperations.default.from_query(query)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/tracers/logger_tracer.rb b/lib/gitlab/graphql/tracers/logger_tracer.rb
new file mode 100644
index 00000000000..c7ba56824db
--- /dev/null
+++ b/lib/gitlab/graphql/tracers/logger_tracer.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Tracers
+ # This tracer writes logs for certain trace events.
+ # It reads duration metadata written by TimerTracer.
+ class LoggerTracer
+ def self.use(schema)
+ schema.tracer(self.new)
+ end
+
+ def trace(key, data)
+ result = yield
+
+ case key
+ when "execute_query"
+ log_execute_query(**data)
+ end
+
+ result
+ end
+
+ private
+
+ def log_execute_query(query: nil, duration_s: 0)
+ # execute_query should always have :query, but we're just being defensive
+ return unless query
+
+ analysis_info = query.context[:gl_analysis]&.transform_keys { |key| "query_analysis.#{key}" }
+ info = {
+ trace_type: 'execute_query',
+ query_fingerprint: query.fingerprint,
+ duration_s: duration_s,
+ operation_name: query.operation_name,
+ operation_fingerprint: query.operation_fingerprint,
+ is_mutation: query.mutation?,
+ variables: clean_variables(query.provided_variables),
+ query_string: query.query_string
+ }
+
+ info.merge!(::Gitlab::ApplicationContext.current)
+ info.merge!(analysis_info) if analysis_info
+
+ ::Gitlab::GraphqlLogger.info(info)
+ end
+
+ def clean_variables(variables)
+ filtered = ActiveSupport::ParameterFilter
+ .new(::Rails.application.config.filter_parameters)
+ .filter(variables)
+
+ filtered&.to_s
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/tracers/metrics_tracer.rb b/lib/gitlab/graphql/tracers/metrics_tracer.rb
new file mode 100644
index 00000000000..9fc001c0a6d
--- /dev/null
+++ b/lib/gitlab/graphql/tracers/metrics_tracer.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Tracers
+ class MetricsTracer
+ def self.use(schema)
+ schema.tracer(self.new)
+ end
+
+ # See https://graphql-ruby.org/api-doc/1.12.16/GraphQL/Tracing for full list of events
+ def trace(key, data)
+ result = yield
+
+ case key
+ when "execute_query"
+ increment_query_sli(data)
+ end
+
+ result
+ end
+
+ private
+
+ def increment_query_sli(data)
+ duration_s = data.fetch(:duration_s, nil)
+ query = data.fetch(:query, nil)
+
+ # We're just being defensive here...
+ # duration_s comes from TimerTracer and we should be pretty much guaranteed it exists
+ return unless duration_s && query
+
+ operation = ::Gitlab::Graphql::KnownOperations.default.from_query(query)
+ query_urgency = operation.query_urgency
+
+ Gitlab::Metrics::RailsSlis.graphql_query_apdex.increment(
+ labels: {
+ endpoint_id: ::Gitlab::ApplicationContext.current_context_attribute(:caller_id),
+ feature_category: ::Gitlab::ApplicationContext.current_context_attribute(:feature_category),
+ query_urgency: query_urgency.name
+ },
+ success: duration_s <= query_urgency.duration
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/tracers/timer_tracer.rb b/lib/gitlab/graphql/tracers/timer_tracer.rb
new file mode 100644
index 00000000000..326620a22bc
--- /dev/null
+++ b/lib/gitlab/graphql/tracers/timer_tracer.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Tracers
+ # This graphql-ruby tracer records duration for trace events and merges
+ # the duration into the trace event's metadata. This way, separate tracers
+ # can all use the same duration information.
+ #
+ # NOTE: TimerTracer should be applied last **after** other tracers, so
+ # that it runs first (similar to function composition)
+ class TimerTracer
+ def self.use(schema)
+ schema.tracer(self.new)
+ end
+
+ def trace(key, data)
+ start_time = Gitlab::Metrics::System.monotonic_time
+
+ result = yield
+
+ duration_s = Gitlab::Metrics::System.monotonic_time - start_time
+
+ data[:duration_s] = duration_s
+
+ result
+ end
+ end
+ end
+ end
+end
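These tracers are meant to compose: TimerTracer writes duration_s into the trace event data, and LoggerTracer/MetricsTracer read it back out. A sketch of the intended wiring on a schema class, assuming graphql-ruby's plugin interface (the schema name is illustrative):

  class GitlabSchema < GraphQL::Schema
    use Gitlab::Graphql::Tracers::ApplicationContextTracer
    use Gitlab::Graphql::Tracers::MetricsTracer
    use Gitlab::Graphql::Tracers::LoggerTracer
    # Applied last (see the NOTE above) so the other tracers
    # see the duration_s it records.
    use Gitlab::Graphql::Tracers::TimerTracer
  end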
diff --git a/lib/gitlab/graphql/variables.rb b/lib/gitlab/graphql/variables.rb
index e17ca56d022..102a269dd5b 100644
--- a/lib/gitlab/graphql/variables.rb
+++ b/lib/gitlab/graphql/variables.rb
@@ -24,8 +24,13 @@ module Gitlab
else
{}
end
- when Hash, ActionController::Parameters
+ when Hash
ambiguous_param
+ when ActionController::Parameters
+ # We can and have to trust the "Parameters" because `graphql-ruby` handles this hash safely
+ # Also, `graphql-ruby` uses hash-specific methods, for example `size`:
+ # https://sourcegraph.com/github.com/rmosolgo/graphql-ruby@61232b03412df6685406fc46c414e11d3f447817/-/blob/lib/graphql/query.rb?L304
+ ambiguous_param.to_unsafe_h
when nil
{}
else
diff --git a/lib/gitlab/health_checks/metric.rb b/lib/gitlab/health_checks/metric.rb
index b697cb0d027..c1e437831d7 100644
--- a/lib/gitlab/health_checks/metric.rb
+++ b/lib/gitlab/health_checks/metric.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -5,3 +6,5 @@ module Gitlab
Metric = Struct.new(:name, :value, :labels)
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/health_checks/probes/status.rb b/lib/gitlab/health_checks/probes/status.rb
index 192e9366001..1c59f18ff7d 100644
--- a/lib/gitlab/health_checks/probes/status.rb
+++ b/lib/gitlab/health_checks/probes/status.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -12,3 +13,5 @@ module Gitlab
end
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/health_checks/redis/cache_check.rb b/lib/gitlab/health_checks/redis/cache_check.rb
index 0c8fe83893b..bd843bdaac4 100644
--- a/lib/gitlab/health_checks/redis/cache_check.rb
+++ b/lib/gitlab/health_checks/redis/cache_check.rb
@@ -4,31 +4,7 @@ module Gitlab
module HealthChecks
module Redis
class CacheCheck
- extend SimpleAbstractCheck
-
- class << self
- def check_up
- check
- end
-
- private
-
- def metric_prefix
- 'redis_cache_ping'
- end
-
- def successful?(result)
- result == 'PONG'
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def check
- catch_timeout 10.seconds do
- Gitlab::Redis::Cache.with(&:ping)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
+ extend RedisAbstractCheck
end
end
end
diff --git a/lib/gitlab/health_checks/redis/queues_check.rb b/lib/gitlab/health_checks/redis/queues_check.rb
index b1e33b9f459..fb92db937dc 100644
--- a/lib/gitlab/health_checks/redis/queues_check.rb
+++ b/lib/gitlab/health_checks/redis/queues_check.rb
@@ -4,31 +4,7 @@ module Gitlab
module HealthChecks
module Redis
class QueuesCheck
- extend SimpleAbstractCheck
-
- class << self
- def check_up
- check
- end
-
- private
-
- def metric_prefix
- 'redis_queues_ping'
- end
-
- def successful?(result)
- result == 'PONG'
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def check
- catch_timeout 10.seconds do
- Gitlab::Redis::Queues.with(&:ping)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
+ extend RedisAbstractCheck
end
end
end
diff --git a/lib/gitlab/health_checks/redis/rate_limiting_check.rb b/lib/gitlab/health_checks/redis/rate_limiting_check.rb
index 67c14e26361..0e9d94f7dff 100644
--- a/lib/gitlab/health_checks/redis/rate_limiting_check.rb
+++ b/lib/gitlab/health_checks/redis/rate_limiting_check.rb
@@ -4,31 +4,7 @@ module Gitlab
module HealthChecks
module Redis
class RateLimitingCheck
- extend SimpleAbstractCheck
-
- class << self
- def check_up
- check
- end
-
- private
-
- def metric_prefix
- 'redis_rate_limiting_ping'
- end
-
- def successful?(result)
- result == 'PONG'
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def check
- catch_timeout 10.seconds do
- Gitlab::Redis::RateLimiting.with(&:ping)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
+ extend RedisAbstractCheck
end
end
end
diff --git a/lib/gitlab/health_checks/redis/redis_abstract_check.rb b/lib/gitlab/health_checks/redis/redis_abstract_check.rb
new file mode 100644
index 00000000000..ecad4b06ea9
--- /dev/null
+++ b/lib/gitlab/health_checks/redis/redis_abstract_check.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module HealthChecks
+ module Redis
+ module RedisAbstractCheck
+ include SimpleAbstractCheck
+
+ def check_up
+ successful?(check)
+ end
+
+ private
+
+ def redis_instance_class_name
+ Gitlab::Redis.const_get(redis_instance_name.camelize, false)
+ end
+
+ def metric_prefix
+ "redis_#{redis_instance_name}_ping"
+ end
+
+ def redis_instance_name
+ name.sub(/_check$/, '')
+ end
+
+ def successful?(result)
+ result == 'PONG'
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def check
+ catch_timeout 10.seconds do
+ redis_instance_class_name.with(&:ping)
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
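RedisAbstractCheck derives everything from the check's name: assuming SimpleAbstractCheck exposes the name in underscored form (as the metric_prefix interpolation implies), "trace_chunks_check" maps to Gitlab::Redis::TraceChunks. A sketch of the behaviour the refactored checks share:

  check = Gitlab::HealthChecks::Redis::TraceChunksCheck

  check.check_up  # => true when PING answers PONG, false otherwise
  # Internally:
  #   redis_instance_name       -> "trace_chunks"
  #   redis_instance_class_name -> Gitlab::Redis::TraceChunks
  #   metric_prefix             -> "redis_trace_chunks_ping"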
diff --git a/lib/gitlab/health_checks/redis/redis_check.rb b/lib/gitlab/health_checks/redis/redis_check.rb
index 25879c18f84..c793a939abd 100644
--- a/lib/gitlab/health_checks/redis/redis_check.rb
+++ b/lib/gitlab/health_checks/redis/redis_check.rb
@@ -14,16 +14,22 @@ module Gitlab
end
def successful?(result)
- result == 'PONG'
+ result == true
end
def check
- ::Gitlab::HealthChecks::Redis::CacheCheck.check_up &&
- ::Gitlab::HealthChecks::Redis::QueuesCheck.check_up &&
- ::Gitlab::HealthChecks::Redis::SharedStateCheck.check_up &&
- ::Gitlab::HealthChecks::Redis::TraceChunksCheck.check_up &&
- ::Gitlab::HealthChecks::Redis::RateLimitingCheck.check_up &&
- ::Gitlab::HealthChecks::Redis::SessionsCheck.check_up
+ redis_health_checks.all?(&:check_up)
+ end
+
+ def redis_health_checks
+ [
+ Gitlab::HealthChecks::Redis::CacheCheck,
+ Gitlab::HealthChecks::Redis::QueuesCheck,
+ Gitlab::HealthChecks::Redis::SharedStateCheck,
+ Gitlab::HealthChecks::Redis::TraceChunksCheck,
+ Gitlab::HealthChecks::Redis::RateLimitingCheck,
+ Gitlab::HealthChecks::Redis::SessionsCheck
+ ]
end
end
end
diff --git a/lib/gitlab/health_checks/redis/sessions_check.rb b/lib/gitlab/health_checks/redis/sessions_check.rb
index a0c5e177b4e..90a4c868f40 100644
--- a/lib/gitlab/health_checks/redis/sessions_check.rb
+++ b/lib/gitlab/health_checks/redis/sessions_check.rb
@@ -4,31 +4,7 @@ module Gitlab
module HealthChecks
module Redis
class SessionsCheck
- extend SimpleAbstractCheck
-
- class << self
- def check_up
- check
- end
-
- private
-
- def metric_prefix
- 'redis_sessions_ping'
- end
-
- def successful?(result)
- result == 'PONG'
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def check
- catch_timeout 10.seconds do
- Gitlab::Redis::Sessions.with(&:ping)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
+ extend RedisAbstractCheck
end
end
end
diff --git a/lib/gitlab/health_checks/redis/shared_state_check.rb b/lib/gitlab/health_checks/redis/shared_state_check.rb
index 285ac271929..80f91784b8c 100644
--- a/lib/gitlab/health_checks/redis/shared_state_check.rb
+++ b/lib/gitlab/health_checks/redis/shared_state_check.rb
@@ -4,31 +4,7 @@ module Gitlab
module HealthChecks
module Redis
class SharedStateCheck
- extend SimpleAbstractCheck
-
- class << self
- def check_up
- check
- end
-
- private
-
- def metric_prefix
- 'redis_shared_state_ping'
- end
-
- def successful?(result)
- result == 'PONG'
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def check
- catch_timeout 10.seconds do
- Gitlab::Redis::SharedState.with(&:ping)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
+ extend RedisAbstractCheck
end
end
end
diff --git a/lib/gitlab/health_checks/redis/trace_chunks_check.rb b/lib/gitlab/health_checks/redis/trace_chunks_check.rb
index cf9fa700b0a..9a89a1ce51d 100644
--- a/lib/gitlab/health_checks/redis/trace_chunks_check.rb
+++ b/lib/gitlab/health_checks/redis/trace_chunks_check.rb
@@ -4,31 +4,7 @@ module Gitlab
module HealthChecks
module Redis
class TraceChunksCheck
- extend SimpleAbstractCheck
-
- class << self
- def check_up
- check
- end
-
- private
-
- def metric_prefix
- 'redis_trace_chunks_ping'
- end
-
- def successful?(result)
- result == 'PONG'
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def check
- catch_timeout 10.seconds do
- Gitlab::Redis::TraceChunks.with(&:ping)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
+ extend RedisAbstractCheck
end
end
end
diff --git a/lib/gitlab/health_checks/result.rb b/lib/gitlab/health_checks/result.rb
index 38a36100ec7..cbb847d2af2 100644
--- a/lib/gitlab/health_checks/result.rb
+++ b/lib/gitlab/health_checks/result.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -13,3 +14,5 @@ module Gitlab
end
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/http.rb b/lib/gitlab/http.rb
index 8a19f208adf..1b860001ac0 100644
--- a/lib/gitlab/http.rb
+++ b/lib/gitlab/http.rb
@@ -15,7 +15,7 @@ module Gitlab
].freeze
HTTP_ERRORS = HTTP_TIMEOUT_ERRORS + [
EOFError, SocketError, OpenSSL::SSL::SSLError, OpenSSL::OpenSSLError,
- Errno::ECONNRESET, Errno::ECONNREFUSED, Errno::EHOSTUNREACH,
+ Errno::ECONNRESET, Errno::ECONNREFUSED, Errno::EHOSTUNREACH, Errno::ENETUNREACH,
Gitlab::HTTP::BlockedUrlError, Gitlab::HTTP::RedirectionTooDeep
].freeze
diff --git a/lib/gitlab/i18n.rb b/lib/gitlab/i18n.rb
index b090d05de19..251bc34d462 100644
--- a/lib/gitlab/i18n.rb
+++ b/lib/gitlab/i18n.rb
@@ -44,26 +44,26 @@ module Gitlab
'bg' => 0,
'cs_CZ' => 0,
'da_DK' => 52,
- 'de' => 16,
+ 'de' => 15,
'en' => 100,
'eo' => 0,
- 'es' => 41,
+ 'es' => 40,
'fil_PH' => 0,
'fr' => 11,
'gl_ES' => 0,
'id_ID' => 0,
'it' => 2,
- 'ja' => 37,
+ 'ja' => 36,
'ko' => 11,
- 'nb_NO' => 35,
+ 'nb_NO' => 34,
'nl_NL' => 0,
'pl_PL' => 5,
- 'pt_BR' => 45,
+ 'pt_BR' => 49,
'ro_RO' => 24,
- 'ru' => 27,
- 'tr_TR' => 16,
- 'uk' => 40,
- 'zh_CN' => 95,
+ 'ru' => 26,
+ 'tr_TR' => 15,
+ 'uk' => 39,
+ 'zh_CN' => 97,
'zh_HK' => 2,
'zh_TW' => 3
}.freeze
diff --git a/lib/gitlab/import/database_helpers.rb b/lib/gitlab/import/database_helpers.rb
index e73c3afe9bd..96490db0c07 100644
--- a/lib/gitlab/import/database_helpers.rb
+++ b/lib/gitlab/import/database_helpers.rb
@@ -11,8 +11,8 @@ module Gitlab
# We use bulk_insert here so we can bypass any queries executed by
# callbacks or validation rules, as doing this wouldn't scale when
# importing very large projects.
- result = Gitlab::Database.main # rubocop:disable Gitlab/BulkInsert
- .bulk_insert(relation.table_name, [attributes], return_ids: true)
+ result = ApplicationRecord # rubocop:disable Gitlab/BulkInsert
+ .legacy_bulk_insert(relation.table_name, [attributes], return_ids: true)
result.first
end
diff --git a/lib/gitlab/import/metrics.rb b/lib/gitlab/import/metrics.rb
index 5f27d0ab965..7a0cf1682a6 100644
--- a/lib/gitlab/import/metrics.rb
+++ b/lib/gitlab/import/metrics.rb
@@ -69,11 +69,7 @@ module Gitlab
end
def observe_histogram
- if project.github_import?
- duration_histogram.observe({ project: project.full_path }, duration)
- else
- duration_histogram.observe({ importer: importer }, duration)
- end
+ duration_histogram.observe({ importer: importer }, duration)
end
def track_finish_metric
diff --git a/lib/gitlab/import_export/attributes_permitter.rb b/lib/gitlab/import_export/attributes_permitter.rb
index 2d8e25a9f70..f6f65f85599 100644
--- a/lib/gitlab/import_export/attributes_permitter.rb
+++ b/lib/gitlab/import_export/attributes_permitter.rb
@@ -44,7 +44,7 @@ module Gitlab
# We want to use AttributesCleaner for these relations instead, in the future this should be removed to make sure
# we are using AttributesPermitter for every imported relation.
- DISABLED_RELATION_NAMES = %i[user author issuable_sla].freeze
+ DISABLED_RELATION_NAMES = %i[author issuable_sla].freeze
def initialize(config: ImportExport::Config.new.to_h)
@config = config
diff --git a/lib/gitlab/import_export/base/object_builder.rb b/lib/gitlab/import_export/base/object_builder.rb
index 48836729ff6..5e9c8292c1e 100644
--- a/lib/gitlab/import_export/base/object_builder.rb
+++ b/lib/gitlab/import_export/base/object_builder.rb
@@ -47,15 +47,15 @@ module Gitlab
attributes
end
- private
+ def find_with_cache(key = cache_key)
+ return yield unless lru_cache && key
- attr_reader :klass, :attributes, :lru_cache, :cache_key
+ lru_cache[key] ||= yield
+ end
- def find_with_cache
- return yield unless lru_cache && cache_key
+ private
- lru_cache[cache_key] ||= yield
- end
+ attr_reader :klass, :attributes, :lru_cache, :cache_key
def cache_from_request_store
Gitlab::SafeRequestStore[:lru_cache] ||= LruRedux::Cache.new(LRU_CACHE_SIZE)
diff --git a/lib/gitlab/import_export/decompressed_archive_size_validator.rb b/lib/gitlab/import_export/decompressed_archive_size_validator.rb
index febfe00af0b..61b37256964 100644
--- a/lib/gitlab/import_export/decompressed_archive_size_validator.rb
+++ b/lib/gitlab/import_export/decompressed_archive_size_validator.rb
@@ -6,7 +6,7 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
DEFAULT_MAX_BYTES = 10.gigabytes.freeze
- TIMEOUT_LIMIT = 60.seconds
+ TIMEOUT_LIMIT = 210.seconds
def initialize(archive_path:, max_bytes: self.class.max_bytes)
@archive_path = archive_path
diff --git a/lib/gitlab/import_export/group/relation_tree_restorer.rb b/lib/gitlab/import_export/group/relation_tree_restorer.rb
new file mode 100644
index 00000000000..f3c392b8c20
--- /dev/null
+++ b/lib/gitlab/import_export/group/relation_tree_restorer.rb
@@ -0,0 +1,274 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Group
+ class RelationTreeRestorer
+ def initialize( # rubocop:disable Metrics/ParameterLists
+ user:,
+ shared:,
+ relation_reader:,
+ members_mapper:,
+ object_builder:,
+ relation_factory:,
+ reader:,
+ importable:,
+ importable_attributes:,
+ importable_path:
+ )
+ @user = user
+ @shared = shared
+ @importable = importable
+ @relation_reader = relation_reader
+ @members_mapper = members_mapper
+ @object_builder = object_builder
+ @relation_factory = relation_factory
+ @reader = reader
+ @importable_attributes = importable_attributes
+ @importable_path = importable_path
+ end
+
+ def restore
+ ActiveRecord::Base.uncached do
+ ActiveRecord::Base.no_touching do
+ update_params!
+
+ BulkInsertableAssociations.with_bulk_insert(enabled: bulk_insert_enabled) do
+ fix_ci_pipelines_not_sorted_on_legacy_project_json!
+ create_relations!
+ end
+ end
+ end
+
+ # ensure that we have the latest version of the restore
+ @importable.reload # rubocop:disable Cop/ActiveRecordAssociationReload
+
+ true
+ rescue StandardError => e
+ @shared.error(e)
+ false
+ end
+
+ private
+
+ def bulk_insert_enabled
+ false
+ end
+
+ # Loops through the tree of models defined in import_export.yml and
+ # finds them in the imported JSON so they can be instantiated and saved
+ # in the DB. The structure and relationships between models are guessed from
+ # the configuration yaml file too.
+ # Finally, it updates each attribute in the newly imported project/group.
+ def create_relations!
+ relations.each do |relation_key, relation_definition|
+ process_relation!(relation_key, relation_definition)
+ end
+ end
+
+ def process_relation!(relation_key, relation_definition)
+ @relation_reader.consume_relation(@importable_path, relation_key).each do |data_hash, relation_index|
+ process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
+ end
+ end
+
+ def process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
+ relation_object = build_relation(relation_key, relation_definition, relation_index, data_hash)
+ return unless relation_object
+ return if relation_invalid_for_importable?(relation_object)
+
+ relation_object.assign_attributes(importable_class_sym => @importable)
+
+ import_failure_service.with_retry(action: 'relation_object.save!', relation_key: relation_key, relation_index: relation_index) do
+ relation_object.save!
+ log_relation_creation(@importable, relation_key, relation_object)
+ end
+ rescue StandardError => e
+ import_failure_service.log_import_failure(
+ source: 'process_relation_item!',
+ relation_key: relation_key,
+ relation_index: relation_index,
+ exception: e)
+ end
+
+ def import_failure_service
+ @import_failure_service ||= ImportFailureService.new(@importable)
+ end
+
+ def relations
+ @relations ||=
+ @reader
+ .attributes_finder
+ .find_relations_tree(importable_class_sym)
+ .deep_stringify_keys
+ end
+
+ def update_params!
+ params = @importable_attributes.except(*relations.keys.map(&:to_s))
+ params = params.merge(present_override_params)
+
+ # Cleaning all imported and overridden params
+ params = Gitlab::ImportExport::AttributeCleaner.clean(
+ relation_hash: params,
+ relation_class: importable_class,
+ excluded_keys: excluded_keys_for_relation(importable_class_sym))
+
+ @importable.assign_attributes(params)
+
+ modify_attributes
+
+ Gitlab::Timeless.timeless(@importable) do
+ @importable.save!
+ end
+ end
+
+ def present_override_params
+ # we filter out the empty strings from the overrides
+ # keeping the default values configured
+ override_params&.transform_values do |value|
+ value.is_a?(String) ? value.presence : value
+ end&.compact
+ end
+
+ def override_params
+ @importable_override_params ||= importable_override_params
+ end
+
+ def importable_override_params
+ if @importable.respond_to?(:import_data)
+ @importable.import_data&.data&.fetch('override_params', nil) || {}
+ else
+ {}
+ end
+ end
+
+ def modify_attributes
+ # no-op to be overridden on inheritance
+ end
+
+ def build_relations(relation_key, relation_definition, relation_index, data_hashes)
+ data_hashes
+ .map { |data_hash| build_relation(relation_key, relation_definition, relation_index, data_hash) }
+ .tap { |entries| entries.compact! }
+ end
+
+ def build_relation(relation_key, relation_definition, relation_index, data_hash)
+ # TODO: This is a hack to avoid creating a relation for the author.
+ # Rather, make `RelationFactory#set_note_author` take care of that.
+ return data_hash if relation_key == 'author' || already_restored?(data_hash)
+
+ # create relation objects recursively for all sub-objects
+ relation_definition.each do |sub_relation_key, sub_relation_definition|
+ transform_sub_relations!(data_hash, sub_relation_key, sub_relation_definition, relation_index)
+ end
+
+ relation = @relation_factory.create(**relation_factory_params(relation_key, relation_index, data_hash))
+
+ if relation && !relation.valid?
+ @shared.logger.warn(
+ message: "[Project/Group Import] Invalid object relation built",
+ relation_key: relation_key,
+ relation_index: relation_index,
+ relation_class: relation.class.name,
+ error_messages: relation.errors.full_messages.join(". ")
+ )
+ end
+
+ relation
+ end
+
+ # Since we update the data hash in place as we restore relation items,
+ # and since we also de-duplicate items, we might encounter items that
+ # have already been restored in a previous iteration.
+ def already_restored?(relation_item)
+ !relation_item.is_a?(Hash)
+ end
+
+ def transform_sub_relations!(data_hash, sub_relation_key, sub_relation_definition, relation_index)
+ sub_data_hash = data_hash[sub_relation_key]
+ return unless sub_data_hash
+
+        # If the object is a hash we can build a single object,
+        # since that means the relation is 1-to-1 rather than 1-to-many.
+ current_item =
+ if sub_data_hash.is_a?(Array)
+ build_relations(
+ sub_relation_key,
+ sub_relation_definition,
+ relation_index,
+ sub_data_hash).presence
+ else
+ build_relation(
+ sub_relation_key,
+ sub_relation_definition,
+ relation_index,
+ sub_data_hash)
+ end
+
+ if current_item
+ data_hash[sub_relation_key] = current_item
+ else
+ data_hash.delete(sub_relation_key)
+ end
+ end
+
+ def relation_invalid_for_importable?(_relation_object)
+ false
+ end
+
+ def excluded_keys_for_relation(relation)
+ @reader.attributes_finder.find_excluded_keys(relation)
+ end
+
+ def importable_class
+ @importable.class
+ end
+
+ def importable_class_sym
+ importable_class.to_s.downcase.to_sym
+ end
+
+ def relation_factory_params(relation_key, relation_index, data_hash)
+ {
+ relation_index: relation_index,
+ relation_sym: relation_key.to_sym,
+ relation_hash: data_hash,
+ importable: @importable,
+ members_mapper: @members_mapper,
+ object_builder: @object_builder,
+ user: @user,
+ excluded_keys: excluded_keys_for_relation(relation_key)
+ }
+ end
+
+      # Temporary fix for https://gitlab.com/gitlab-org/gitlab/-/issues/27883 when importing from a legacy project.json.
+      # This should be removed once the legacy JSON format is deprecated.
+      # NDJSON export files already fix the order during project export.
+ def fix_ci_pipelines_not_sorted_on_legacy_project_json!
+ return unless @relation_reader.legacy?
+
+ @relation_reader.sort_ci_pipelines_by_id
+ end
+
+      # Log each top-level relation creation when importing into a Group,
+      # if the feature flag is enabled.
+ def log_relation_creation(importable, relation_key, relation_object)
+ root_ancestor_group = importable.try(:root_ancestor)
+
+ return unless root_ancestor_group
+ return unless root_ancestor_group.instance_of?(::Group)
+ return unless Feature.enabled?(:log_import_export_relation_creation, root_ancestor_group)
+
+ @shared.logger.info(
+ importable_type: importable.class.to_s,
+ importable_id: importable.id,
+ relation_key: relation_key,
+ relation_id: relation_object.id,
+ author_id: relation_object.try(:author_id),
+ message: '[Project/Group Import] Created new object relation'
+ )
+ end
+ end
+ end
+ end
+end
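
For context on how the class above is driven: `consume_relation` yields `[data_hash, relation_index]` pairs, and each item is saved individually behind the retry wrapper. A minimal, self-contained sketch of that enumerator contract (stand-in class and data, not GitLab code):

    # Stand-in reader (illustrative only): yields [data_hash, relation_index]
    # pairs, mirroring the contract the restorer relies on above.
    class FakeRelationReader
      def initialize(items)
        @items = items
      end

      def consume_relation(_importable_path, _relation_key)
        Enumerator.new do |yielder|
          @items.each_with_index { |item, index| yielder << [item, index] }
        end
      end
    end

    reader = FakeRelationReader.new([{ 'title' => 'First issue' }, { 'title' => 'Second issue' }])

    reader.consume_relation('project-1/project', 'issues').each do |data_hash, relation_index|
      # a real restorer would build and save a model here, retrying on failure
      puts "restoring issues[#{relation_index}]: #{data_hash['title']}"
    end
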
diff --git a/lib/gitlab/import_export/project/import_export.yml b/lib/gitlab/import_export/project/import_export.yml
index 618ef9a4f43..d815dd284ba 100644
--- a/lib/gitlab/import_export/project/import_export.yml
+++ b/lib/gitlab/import_export/project/import_export.yml
@@ -178,17 +178,7 @@ included_attributes:
- :project_id
- :key
- :value
- label:
- - :title
- - :color
- - :project_id
- - :group_id
- - :created_at
- - :updated_at
- - :template
- - :description
- - :priority
- labels:
+ label: &label_definition
- :title
- :color
- :project_id
@@ -198,23 +188,13 @@ included_attributes:
- :template
- :description
- :priority
+ labels: *label_definition
priorities:
- :project_id
- :priority
- :created_at
- :updated_at
- milestone:
- - :iid
- - :title
- - :project_id
- - :group_id
- - :description
- - :due_date
- - :created_at
- - :updated_at
- - :start_date
- - :state
- milestones:
+ milestone: &milestone_definition
- :iid
- :title
- :project_id
@@ -225,6 +205,7 @@ included_attributes:
- :updated_at
- :start_date
- :state
+ milestones: *milestone_definition
protected_branches:
- :project_id
- :name
@@ -272,6 +253,385 @@ included_attributes:
- :updated_at
- :filepath
- :link_type
+ container_expiration_policy:
+ - :created_at
+ - :updated_at
+ - :next_run_at
+ - :project_id
+ - :name_regex
+ - :cadence
+ - :older_than
+ - :keep_n
+ - :enabled
+ - :name_regex_keep
+ project_feature:
+ - :project_id
+ - :merge_requests_access_level
+ - :issues_access_level
+ - :wiki_access_level
+ - :snippets_access_level
+ - :builds_access_level
+ - :created_at
+ - :updated_at
+ - :repository_access_level
+ - :pages_access_level
+ - :forking_access_level
+ - :metrics_dashboard_access_level
+ - :operations_access_level
+ - :analytics_access_level
+ - :security_and_compliance_access_level
+ - :container_registry_access_level
+ prometheus_metrics:
+ - :created_at
+ - :updated_at
+ - :project_id
+ - :y_label
+ - :unit
+ - :legend
+ - :title
+ - :query
+ - :group
+ - :dashboard_path
+ service_desk_setting:
+ - :project_id
+ - :issue_template_key
+ - :project_key
+ snippets:
+ - :title
+ - :content
+ - :author_id
+ - :project_id
+ - :created_at
+ - :updated_at
+ - :file_name
+ - :visibility_level
+ - :description
+ project_members:
+ - :access_level
+ - :source_type
+ - :user_id
+ - :notification_level
+ - :created_at
+ - :updated_at
+ - :created_by_id
+ - :invite_email
+ - :invite_accepted_at
+ - :requested_at
+ - :expires_at
+ - :ldap
+ - :override
+ merge_request: &merge_request_definition
+ - :target_branch
+ - :source_branch
+ - :source_project_id
+ - :author_id
+ - :assignee_id
+ - :title
+ - :created_at
+ - :updated_at
+ - :state
+ - :merge_status
+ - :target_project_id
+ - :iid
+ - :description
+ - :updated_by_id
+ - :merge_error
+ - :merge_params
+ - :merge_when_pipeline_succeeds
+ - :merge_user_id
+ - :merge_commit_sha
+ - :squash_commit_sha
+ - :in_progress_merge_commit_sha
+ - :lock_version
+ - :approvals_before_merge
+ - :rebase_commit_sha
+ - :time_estimate
+ - :squash
+ - :last_edited_at
+ - :last_edited_by_id
+ - :discussion_locked
+ - :allow_maintainer_to_push
+ - :merge_ref_sha
+ - :draft
+ - :diff_head_sha
+ - :source_branch_sha
+ - :target_branch_sha
+ merge_requests: *merge_request_definition
+ award_emoji:
+ - :user_id
+ - :name
+ - :awardable_type
+ - :created_at
+ - :updated_at
+ commit_author:
+ - :name
+ - :email
+ committer:
+ - :name
+ - :email
+ events:
+ - :target_type
+ - :action
+ - :author_id
+ - :fingerprint
+ - :created_at
+ - :updated_at
+ label_links:
+ - :target_type
+ - :created_at
+ - :updated_at
+ merge_request_diff:
+ - :state
+ - :created_at
+ - :updated_at
+ - :base_commit_sha
+ - :real_size
+ - :head_commit_sha
+ - :start_commit_sha
+ - :commits_count
+ - :files_count
+ - :sorted
+ - :diff_type
+ merge_request_diff_commits:
+ - :author_name
+ - :author_email
+ - :committer_name
+ - :committer_email
+ - :relative_order
+ - :sha
+ - :authored_date
+ - :committed_date
+ - :message
+ - :trailers
+ merge_request_diff_files:
+ - :relative_order
+ - :new_file
+ - :renamed_file
+ - :deleted_file
+ - :new_path
+ - :old_path
+ - :a_mode
+ - :b_mode
+ - :too_large
+ - :binary
+ - :diff
+ metrics:
+ - :created_at
+ - :updated_at
+ - :latest_closed_by_id
+ - :latest_closed_at
+ - :merged_by_id
+ - :merged_at
+ - :latest_build_started_at
+ - :latest_build_finished_at
+ - :first_deployed_to_production_at
+ - :first_comment_at
+ - :first_commit_at
+ - :last_commit_at
+ - :diff_size
+ - :modified_paths_size
+ - :commits_count
+ - :first_approved_at
+ - :first_reassigned_at
+ - :added_lines
+ - :target_project_id
+ - :removed_lines
+ notes:
+ - :note
+ - :noteable_type
+ - :author_id
+ - :created_at
+ - :updated_at
+ - :project_id
+ - :attachment
+ - :line_code
+ - :commit_id
+ - :system
+ - :st_diff
+ - :updated_by_id
+ - :type
+ - :position
+ - :original_position
+ - :change_position
+ - :resolved_at
+ - :resolved_by_id
+ - :resolved_by_push
+ - :discussion_id
+ - :confidential
+ - :last_edited_at
+ push_event_payload:
+ - :commit_count
+ - :action
+ - :ref_type
+ - :commit_from
+ - :commit_to
+ - :ref
+ - :commit_title
+ - :ref_count
+ resource_label_events:
+ - :action
+ - :user_id
+ - :created_at
+ suggestions:
+ - :relative_order
+ - :applied
+ - :commit_id
+ - :from_content
+ - :to_content
+ - :outdated
+ - :lines_above
+ - :lines_below
+ system_note_metadata:
+ - :commit_count
+ - :action
+ - :created_at
+ - :updated_at
+ timelogs:
+ - :time_spent
+ - :user_id
+ - :project_id
+ - :spent_at
+ - :created_at
+ - :updated_at
+ - :summary
+ external_pull_request: &external_pull_request_definition
+ - :created_at
+ - :updated_at
+ - :project_id
+ - :pull_request_iid
+ - :status
+ - :source_branch
+ - :target_branch
+ - :source_repository
+ - :target_repository
+ - :source_sha
+ - :target_sha
+ external_pull_requests: *external_pull_request_definition
+ statuses:
+ - :project_id
+ - :status
+ - :finished_at
+ - :created_at
+ - :updated_at
+ - :started_at
+ - :coverage
+ - :commit_id
+ - :name
+ - :options
+ - :allow_failure
+ - :stage
+ - :stage_idx
+ - :tag
+ - :ref
+ - :user_id
+ - :type
+ - :target_url
+ - :description
+ - :erased_at
+ - :artifacts_expire_at
+ - :environment
+ - :yaml_variables
+ - :queued_at
+ - :lock_version
+ - :coverage_regex
+ - :retried
+ - :protected
+ - :failure_reason
+ - :scheduled_at
+ - :scheduling_type
+ ci_pipelines:
+ - :ref
+ - :sha
+ - :before_sha
+ - :created_at
+ - :updated_at
+ - :tag
+ - :yaml_errors
+ - :committed_at
+ - :project_id
+ - :status
+ - :started_at
+ - :finished_at
+ - :duration
+ - :user_id
+ - :lock_version
+ - :source
+ - :protected
+ - :config_source
+ - :failure_reason
+ - :iid
+ - :source_sha
+ - :target_sha
+ stages:
+ - :name
+ - :status
+ - :position
+ - :lock_version
+ - :project_id
+ - :created_at
+ - :updated_at
+ actions:
+ - :event
+ - :image_v432x230
+ design: &design_definition
+ - :iid
+ - :project_id
+ - :filename
+ - :relative_position
+ designs: *design_definition
+ design_versions:
+ - :created_at
+ - :sha
+ - :author_id
+ issue_assignees:
+ - :user_id
+ sentry_issue:
+ - :sentry_issue_identifier
+ zoom_meetings:
+ - :project_id
+ - :issue_status
+ - :url
+ - :created_at
+ - :updated_at
+ issues:
+ - :title
+ - :author_id
+ - :project_id
+ - :created_at
+ - :updated_at
+ - :description
+ - :state
+ - :iid
+ - :updated_by_id
+ - :confidential
+ - :closed_at
+ - :closed_by_id
+ - :due_date
+ - :lock_version
+ - :weight
+ - :time_estimate
+ - :relative_position
+ - :external_author
+ - :last_edited_at
+ - :last_edited_by_id
+ - :discussion_locked
+ - :health_status
+ - :external_key
+ - :issue_type
+ group_members:
+ - :access_level
+ - :source_type
+ - :user_id
+ - :notification_level
+ - :created_at
+ - :updated_at
+ - :created_by_id
+ - :invite_email
+ - :invite_accepted_at
+ - :requested_at
+ - :expires_at
+ - :ldap
+ - :override
# Do not include the following attributes for the models specified.
excluded_attributes:
@@ -387,16 +747,7 @@ excluded_attributes:
- :service_desk_reply_to
- :upvotes_count
- :work_item_type_id
- merge_request:
- - :milestone_id
- - :sprint_id
- - :ref_fetched
- - :merge_jid
- - :rebase_jid
- - :latest_merge_request_diff_id
- - :head_pipeline_id
- - :state_id
- merge_requests:
+ merge_request: &merge_request_excluded_definition
- :milestone_id
- :sprint_id
- :ref_fetched
@@ -405,6 +756,7 @@ excluded_attributes:
- :latest_merge_request_diff_id
- :head_pipeline_id
- :state_id
+ merge_requests: *merge_request_excluded_definition
award_emoji:
- :awardable_id
statuses:
@@ -473,10 +825,9 @@ excluded_attributes:
- :issue_id
zoom_meetings:
- :issue_id
- design:
- - :issue_id
- designs:
+ design: &design_excluded_definition
- :issue_id
+ designs: *design_excluded_definition
design_versions:
- :issue_id
actions:
@@ -660,4 +1011,13 @@ ee:
- :name
- :created_at
- :updated_at
-
+ project_feature:
+ - :requirements_access_level
+ security_setting:
+ - :project_id
+ - :created_at
+ - :updated_at
+ - :auto_fix_container_scanning
+ - :auto_fix_dast
+ - :auto_fix_dependency_scanning
+ - :auto_fix_sast
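
The attribute lists above are deduplicated with YAML anchors (`&label_definition`, `&milestone_definition`, `&merge_request_definition`, ...) and aliases (`*...`), so the singular and plural relation keys share a single definition. A small sketch showing that an alias parses to the same list (generic YAML, assuming a Psych version that accepts the `aliases:` keyword):

    require 'yaml'

    yaml = <<~YAML
      included_attributes:
        label: &label_definition
          - :title
          - :color
        labels: *label_definition
    YAML

    config = YAML.safe_load(yaml, aliases: true)

    config['included_attributes']['label'] == config['included_attributes']['labels'] # => true
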
diff --git a/lib/gitlab/import_export/project/object_builder.rb b/lib/gitlab/import_export/project/object_builder.rb
index b03dceba303..f7598ba1337 100644
--- a/lib/gitlab/import_export/project/object_builder.rb
+++ b/lib/gitlab/import_export/project/object_builder.rb
@@ -29,6 +29,7 @@ module Gitlab
def find
return if epic? && group.nil?
return find_diff_commit_user if diff_commit_user?
+ return find_diff_commit if diff_commit?
super
end
@@ -83,9 +84,38 @@ module Gitlab
end
def find_diff_commit_user
- find_with_cache do
- MergeRequest::DiffCommitUser
- .find_or_create(@attributes['name'], @attributes['email'])
+ find_or_create_diff_commit_user(@attributes['name'], @attributes['email'])
+ end
+
+ def find_diff_commit
+ row = @attributes.dup
+
+ # Diff commits come in two formats:
+ #
+ # 1. The old format where author/committer details are separate fields
+        # 2. The new format where author/committer details are nested objects
+        #    already pre-processed by `find_diff_commit_user`.
+ #
+        # The code here ensures we support both the old and new formats.
+ aname = row.delete('author_name')
+ amail = row.delete('author_email')
+ cname = row.delete('committer_name')
+ cmail = row.delete('committer_email')
+ author = row.delete('commit_author')
+ committer = row.delete('committer')
+
+ row['commit_author'] = author ||
+ find_or_create_diff_commit_user(aname, amail)
+
+ row['committer'] = committer ||
+ find_or_create_diff_commit_user(cname, cmail)
+
+ MergeRequestDiffCommit.new(row)
+ end
+
+ def find_or_create_diff_commit_user(name, email)
+ find_with_cache([MergeRequest::DiffCommitUser, name, email]) do
+ MergeRequest::DiffCommitUser.find_or_create(name, email)
end
end
@@ -113,6 +143,10 @@ module Gitlab
klass == MergeRequest::DiffCommitUser
end
+ def diff_commit?
+ klass == MergeRequestDiffCommit
+ end
+
# If an existing group milestone used the IID
# claim the IID back and set the group milestone to use one available
# This is necessary to fix situations like the following:
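
As the comment in `find_diff_commit` notes, the old flat author/committer fields and the new nested objects are normalised into one shape before the record is built. A self-contained sketch of that normalisation idea (hypothetical helper, not the GitLab implementation):

    # Hypothetical helper showing the normalisation idea only.
    def normalize_diff_commit(row)
      row = row.dup
      author_name  = row.delete('author_name')
      author_email = row.delete('author_email')

      # old format: promote the flat fields into a nested commit_author object
      row['commit_author'] ||= { 'name' => author_name, 'email' => author_email } if author_name

      row
    end

    old_format = { 'sha' => 'abc123', 'author_name' => 'Alice', 'author_email' => 'alice@example.com' }
    new_format = { 'sha' => 'abc123', 'commit_author' => { 'name' => 'Alice', 'email' => 'alice@example.com' } }

    normalize_diff_commit(old_format) == normalize_diff_commit(new_format) # => true
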
diff --git a/lib/gitlab/import_export/project/relation_factory.rb b/lib/gitlab/import_export/project/relation_factory.rb
index 888a5a10f2c..d84db92fe69 100644
--- a/lib/gitlab/import_export/project/relation_factory.rb
+++ b/lib/gitlab/import_export/project/relation_factory.rb
@@ -33,7 +33,8 @@ module Gitlab
links: 'Releases::Link',
metrics_setting: 'ProjectMetricsSetting',
commit_author: 'MergeRequest::DiffCommitUser',
- committer: 'MergeRequest::DiffCommitUser' }.freeze
+ committer: 'MergeRequest::DiffCommitUser',
+ merge_request_diff_commits: 'MergeRequestDiffCommit' }.freeze
BUILD_MODELS = %i[Ci::Build commit_status].freeze
@@ -59,6 +60,7 @@ module Gitlab
external_pull_requests
DesignManagement::Design
MergeRequest::DiffCommitUser
+ MergeRequestDiffCommit
].freeze
def create
diff --git a/lib/gitlab/import_export/project/relation_tree_restorer.rb b/lib/gitlab/import_export/project/relation_tree_restorer.rb
new file mode 100644
index 00000000000..6e9548f393a
--- /dev/null
+++ b/lib/gitlab/import_export/project/relation_tree_restorer.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ module Project
+ class RelationTreeRestorer < ImportExport::Group::RelationTreeRestorer
+ # Relations which cannot be saved at project level (and have a group assigned)
+ GROUP_MODELS = [GroupLabel, Milestone, Epic].freeze
+
+ private
+
+ def bulk_insert_enabled
+ true
+ end
+
+ def modify_attributes
+ @importable.reconcile_shared_runners_setting!
+ @importable.drop_visibility_level!
+ end
+
+ def relation_invalid_for_importable?(relation_object)
+ GROUP_MODELS.include?(relation_object.class) && relation_object.group_id
+ end
+ end
+ end
+ end
+end
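
The project restorer overrides only the small hooks (`bulk_insert_enabled`, `modify_attributes`, `relation_invalid_for_importable?`) while the group base class keeps the overall restore flow. A generic sketch of that template-method split, with illustrative classes:

    # Illustrative classes only: the base class owns the algorithm,
    # subclasses override the small hooks it calls.
    class BaseRestorer
      def restore
        bulk_insert_enabled ? 'bulk insert path' : 'row-by-row path'
      end

      private

      def bulk_insert_enabled
        false
      end
    end

    class ProjectRestorer < BaseRestorer
      private

      def bulk_insert_enabled
        true
      end
    end

    BaseRestorer.new.restore    # => "row-by-row path"
    ProjectRestorer.new.restore # => "bulk insert path"
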
diff --git a/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb b/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb
index 4db92b12968..034122a9f14 100644
--- a/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb
+++ b/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb
@@ -4,7 +4,7 @@ module Gitlab
module ImportExport
module Project
module Sample
- class RelationTreeRestorer < ImportExport::RelationTreeRestorer
+ class RelationTreeRestorer < ImportExport::Project::RelationTreeRestorer
def initialize(...)
super(...)
@@ -18,10 +18,10 @@ module Gitlab
end
def dates
- return [] if relation_reader.legacy?
+ return [] if @relation_reader.legacy?
RelationFactory::DATE_MODELS.flat_map do |tag|
- relation_reader.consume_relation(@importable_path, tag, mark_as_consumed: false).map do |model|
+ @relation_reader.consume_relation(@importable_path, tag, mark_as_consumed: false).map do |model|
model.first['due_date']
end
end
diff --git a/lib/gitlab/import_export/project/tree_saver.rb b/lib/gitlab/import_export/project/tree_saver.rb
index 1f0fa249390..aafed850afa 100644
--- a/lib/gitlab/import_export/project/tree_saver.rb
+++ b/lib/gitlab/import_export/project/tree_saver.rb
@@ -6,20 +6,16 @@ module Gitlab
class TreeSaver
attr_reader :full_path
- def initialize(project:, current_user:, shared:, params: {})
+ def initialize(project:, current_user:, shared:, params: {}, logger: Gitlab::Import::Logger)
@params = params
@project = project
@current_user = current_user
@shared = shared
+ @logger = logger
end
def save
- ImportExport::Json::StreamingSerializer.new(
- exportable,
- reader.project_tree,
- json_writer,
- exportable_path: "project"
- ).execute
+ stream_export
true
rescue StandardError => e
@@ -31,6 +27,32 @@ module Gitlab
private
+ def stream_export
+ on_retry = proc do |exception, try, elapsed_time, next_interval|
+ @logger.info(
+ message: "Project export retry triggered from streaming",
+ 'error.class': exception.class,
+ 'error.message': exception.message,
+ try_count: try,
+ elapsed_time_s: elapsed_time,
+ wait_to_retry_s: next_interval,
+ project_name: @project.name,
+ project_id: @project.id
+ )
+ end
+
+ serializer = ImportExport::Json::StreamingSerializer.new(
+ exportable,
+ reader.project_tree,
+ json_writer,
+ exportable_path: "project"
+ )
+
+ Retriable.retriable(on: Net::OpenTimeout, on_retry: on_retry) do
+ serializer.execute
+ end
+ end
+
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
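
`stream_export` wraps `serializer.execute` in `Retriable.retriable`, retrying on `Net::OpenTimeout` and logging each attempt. A self-contained sketch of the same pattern, assuming the retriable gem's documented API and a simulated failing operation:

    require 'retriable'
    require 'net/http' # defines Net::OpenTimeout

    attempts = 0

    on_retry = proc do |exception, try, elapsed_time, next_interval|
      puts "retry ##{try}: #{exception.class}, #{elapsed_time.round(2)}s elapsed, next in #{next_interval}s"
    end

    Retriable.retriable(on: Net::OpenTimeout, on_retry: on_retry, tries: 3) do
      attempts += 1
      raise Net::OpenTimeout, 'simulated timeout' if attempts < 3

      puts "export finished after #{attempts} attempts"
    end
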
diff --git a/lib/gitlab/import_export/relation_tree_restorer.rb b/lib/gitlab/import_export/relation_tree_restorer.rb
deleted file mode 100644
index 1eeacafef53..00000000000
--- a/lib/gitlab/import_export/relation_tree_restorer.rb
+++ /dev/null
@@ -1,280 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module ImportExport
- class RelationTreeRestorer
- # Relations which cannot be saved at project level (and have a group assigned)
- GROUP_MODELS = [GroupLabel, Milestone, Epic].freeze
-
- attr_reader :user
- attr_reader :shared
- attr_reader :importable
- attr_reader :relation_reader
-
- def initialize( # rubocop:disable Metrics/ParameterLists
- user:, shared:, relation_reader:,
- members_mapper:, object_builder:,
- relation_factory:,
- reader:,
- importable:,
- importable_attributes:,
- importable_path:
- )
- @user = user
- @shared = shared
- @importable = importable
- @relation_reader = relation_reader
- @members_mapper = members_mapper
- @object_builder = object_builder
- @relation_factory = relation_factory
- @reader = reader
- @importable_attributes = importable_attributes
- @importable_path = importable_path
- end
-
- def restore
- ActiveRecord::Base.uncached do
- ActiveRecord::Base.no_touching do
- update_params!
-
- BulkInsertableAssociations.with_bulk_insert(enabled: project?) do
- fix_ci_pipelines_not_sorted_on_legacy_project_json!
- create_relations!
- end
- end
- end
-
- # ensure that we have latest version of the restore
- @importable.reload # rubocop:disable Cop/ActiveRecordAssociationReload
-
- true
- rescue StandardError => e
- @shared.error(e)
- false
- end
-
- private
-
- def project?
- @importable.instance_of?(::Project)
- end
-
- # Loops through the tree of models defined in import_export.yml and
- # finds them in the imported JSON so they can be instantiated and saved
- # in the DB. The structure and relationships between models are guessed from
- # the configuration yaml file too.
- # Finally, it updates each attribute in the newly imported project/group.
- def create_relations!
- relations.each do |relation_key, relation_definition|
- process_relation!(relation_key, relation_definition)
- end
- end
-
- def process_relation!(relation_key, relation_definition)
- @relation_reader.consume_relation(@importable_path, relation_key).each do |data_hash, relation_index|
- process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
- end
- end
-
- def process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
- relation_object = build_relation(relation_key, relation_definition, relation_index, data_hash)
- return unless relation_object
- return if project? && group_model?(relation_object)
-
- relation_object.assign_attributes(importable_class_sym => @importable)
-
- import_failure_service.with_retry(action: 'relation_object.save!', relation_key: relation_key, relation_index: relation_index) do
- relation_object.save!
- log_relation_creation(@importable, relation_key, relation_object)
- end
- rescue StandardError => e
- import_failure_service.log_import_failure(
- source: 'process_relation_item!',
- relation_key: relation_key,
- relation_index: relation_index,
- exception: e)
- end
-
- def import_failure_service
- @import_failure_service ||= ImportFailureService.new(@importable)
- end
-
- def relations
- @relations ||=
- @reader
- .attributes_finder
- .find_relations_tree(importable_class_sym)
- .deep_stringify_keys
- end
-
- def update_params!
- params = @importable_attributes.except(*relations.keys.map(&:to_s))
- params = params.merge(present_override_params)
-
- # Cleaning all imported and overridden params
- params = Gitlab::ImportExport::AttributeCleaner.clean(
- relation_hash: params,
- relation_class: importable_class,
- excluded_keys: excluded_keys_for_relation(importable_class_sym))
-
- @importable.assign_attributes(params)
-
- modify_attributes
-
- Gitlab::Timeless.timeless(@importable) do
- @importable.save!
- end
- end
-
- def present_override_params
- # we filter out the empty strings from the overrides
- # keeping the default values configured
- override_params&.transform_values do |value|
- value.is_a?(String) ? value.presence : value
- end&.compact
- end
-
- def override_params
- @importable_override_params ||= importable_override_params
- end
-
- def importable_override_params
- if @importable.respond_to?(:import_data)
- @importable.import_data&.data&.fetch('override_params', nil) || {}
- else
- {}
- end
- end
-
- def modify_attributes
- return unless project?
-
- @importable.reconcile_shared_runners_setting!
- @importable.drop_visibility_level!
- end
-
- def build_relations(relation_key, relation_definition, relation_index, data_hashes)
- data_hashes
- .map { |data_hash| build_relation(relation_key, relation_definition, relation_index, data_hash) }
- .tap { |entries| entries.compact! }
- end
-
- def build_relation(relation_key, relation_definition, relation_index, data_hash)
- # TODO: This is hack to not create relation for the author
- # Rather make `RelationFactory#set_note_author` to take care of that
- return data_hash if relation_key == 'author' || already_restored?(data_hash)
-
- # create relation objects recursively for all sub-objects
- relation_definition.each do |sub_relation_key, sub_relation_definition|
- transform_sub_relations!(data_hash, sub_relation_key, sub_relation_definition, relation_index)
- end
-
- relation = @relation_factory.create(**relation_factory_params(relation_key, relation_index, data_hash))
-
- if relation && !relation.valid?
- @shared.logger.warn(
- message: "[Project/Group Import] Invalid object relation built",
- relation_key: relation_key,
- relation_index: relation_index,
- relation_class: relation.class.name,
- error_messages: relation.errors.full_messages.join(". ")
- )
- end
-
- relation
- end
-
- # Since we update the data hash in place as we restore relation items,
- # and since we also de-duplicate items, we might encounter items that
- # have already been restored in a previous iteration.
- def already_restored?(relation_item)
- !relation_item.is_a?(Hash)
- end
-
- def transform_sub_relations!(data_hash, sub_relation_key, sub_relation_definition, relation_index)
- sub_data_hash = data_hash[sub_relation_key]
- return unless sub_data_hash
-
- # if object is a hash we can create simple object
- # as it means that this is 1-to-1 vs 1-to-many
- current_item =
- if sub_data_hash.is_a?(Array)
- build_relations(
- sub_relation_key,
- sub_relation_definition,
- relation_index,
- sub_data_hash).presence
- else
- build_relation(
- sub_relation_key,
- sub_relation_definition,
- relation_index,
- sub_data_hash)
- end
-
- if current_item
- data_hash[sub_relation_key] = current_item
- else
- data_hash.delete(sub_relation_key)
- end
- end
-
- def group_model?(relation_object)
- GROUP_MODELS.include?(relation_object.class) && relation_object.group_id
- end
-
- def excluded_keys_for_relation(relation)
- @reader.attributes_finder.find_excluded_keys(relation)
- end
-
- def importable_class
- @importable.class
- end
-
- def importable_class_sym
- importable_class.to_s.downcase.to_sym
- end
-
- def relation_factory_params(relation_key, relation_index, data_hash)
- {
- relation_index: relation_index,
- relation_sym: relation_key.to_sym,
- relation_hash: data_hash,
- importable: @importable,
- members_mapper: @members_mapper,
- object_builder: @object_builder,
- user: @user,
- excluded_keys: excluded_keys_for_relation(relation_key)
- }
- end
-
- # Temporary fix for https://gitlab.com/gitlab-org/gitlab/-/issues/27883 when import from legacy project.json
- # This should be removed once legacy JSON format is deprecated.
- # Ndjson export file will fix the order during project export.
- def fix_ci_pipelines_not_sorted_on_legacy_project_json!
- return unless relation_reader.legacy?
-
- relation_reader.sort_ci_pipelines_by_id
- end
-
- # Enable logging of each top-level relation creation when Importing
- # into a Group if feature flag is enabled
- def log_relation_creation(importable, relation_key, relation_object)
- root_ancestor_group = importable.try(:root_ancestor)
-
- return unless root_ancestor_group
- return unless root_ancestor_group.instance_of?(::Group)
- return unless Feature.enabled?(:log_import_export_relation_creation, root_ancestor_group)
-
- @shared.logger.info(
- importable_type: importable.class.to_s,
- importable_id: importable.id,
- relation_key: relation_key,
- relation_id: relation_object.id,
- author_id: relation_object.try(:author_id),
- message: '[Project/Group Import] Created new object relation'
- )
- end
- end
- end
-end
diff --git a/lib/gitlab/instrumentation/redis_interceptor.rb b/lib/gitlab/instrumentation/redis_interceptor.rb
index ba25e54ac9f..14474693ddf 100644
--- a/lib/gitlab/instrumentation/redis_interceptor.rb
+++ b/lib/gitlab/instrumentation/redis_interceptor.rb
@@ -5,10 +5,6 @@ module Gitlab
module RedisInterceptor
APDEX_EXCLUDE = %w[brpop blpop brpoplpush bzpopmin bzpopmax xread xreadgroup].freeze
- # These are temporary to help with investigating
- # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1183
- DURATION_ERROR_THRESHOLD = 1.25.seconds
-
class MysteryRedisDurationError < StandardError
attr_reader :backtrace
@@ -19,7 +15,6 @@ module Gitlab
def call(*args, &block)
start = Gitlab::Metrics::System.monotonic_time # must come first so that 'start' is always defined
- start_real_time = Time.now
instrumentation_class.instance_count_request
instrumentation_class.redis_cluster_validate!(args.first)
@@ -40,16 +35,6 @@ module Gitlab
instrumentation_class.add_duration(duration)
instrumentation_class.add_call_details(duration, args)
end
-
- if duration > DURATION_ERROR_THRESHOLD &&
- instrumentation_class == ::Gitlab::Instrumentation::Redis::SharedState &&
- Feature.enabled?(:report_on_long_redis_durations, default_enabled: :yaml)
-
- Gitlab::ErrorTracking.track_exception(MysteryRedisDurationError.new(caller),
- command: command_from_args(args),
- duration: duration,
- timestamp: start_real_time.iso8601(5))
- end
end
def write(command)
diff --git a/lib/gitlab/instrumentation/uploads.rb b/lib/gitlab/instrumentation/uploads.rb
new file mode 100644
index 00000000000..02e457453cd
--- /dev/null
+++ b/lib/gitlab/instrumentation/uploads.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Instrumentation
+ class Uploads
+ UPLOAD_DURATION = :uploaded_file_upload_duration_s
+ UPLOADED_FILE_SIZE = :uploaded_file_size_bytes
+
+ def self.track(uploaded_file)
+ if ::Gitlab::SafeRequestStore.active?
+ ::Gitlab::SafeRequestStore[UPLOAD_DURATION] = uploaded_file.upload_duration
+ ::Gitlab::SafeRequestStore[UPLOADED_FILE_SIZE] = uploaded_file.size
+ end
+ end
+
+ def self.get_upload_duration
+ ::Gitlab::SafeRequestStore[UPLOAD_DURATION]
+ end
+
+ def self.get_uploaded_file_size
+ ::Gitlab::SafeRequestStore[UPLOADED_FILE_SIZE]
+ end
+
+ def self.payload
+ {
+ UPLOAD_DURATION => get_upload_duration,
+ UPLOADED_FILE_SIZE => get_uploaded_file_size
+ }.compact
+ end
+ end
+ end
+end
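
The upload metrics are stashed in the request store and folded into the structured log payload, with `.compact` dropping any key that was never recorded. A trivial sketch of that merge:

    payload = { time: '2021-11-18T13:16:36Z' }

    upload_metrics = {
      uploaded_file_upload_duration_s: 1.2,
      uploaded_file_size_bytes: nil # nothing was recorded for this request
    }.compact

    payload.merge!(upload_metrics)
    # => { time: '2021-11-18T13:16:36Z', uploaded_file_upload_duration_s: 1.2 }
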
diff --git a/lib/gitlab/instrumentation_helper.rb b/lib/gitlab/instrumentation_helper.rb
index 26e44d7822e..155e365d04c 100644
--- a/lib/gitlab/instrumentation_helper.rb
+++ b/lib/gitlab/instrumentation_helper.rb
@@ -31,6 +31,7 @@ module Gitlab
instrument_thread_memory_allocations(payload)
instrument_load_balancing(payload)
instrument_pid(payload)
+ instrument_uploads(payload)
end
def instrument_gitaly(payload)
@@ -116,6 +117,10 @@ module Gitlab
payload.merge!(load_balancing_payload)
end
+ def instrument_uploads(payload)
+ payload.merge! ::Gitlab::Instrumentation::Uploads.payload
+ end
+
# Returns the queuing duration for a Sidekiq job in seconds, as a float, if the
# `enqueued_at` field or `created_at` field is available.
#
diff --git a/lib/gitlab/integrations/sti_type.rb b/lib/gitlab/integrations/sti_type.rb
index 91797a7b99b..1350d75b216 100644
--- a/lib/gitlab/integrations/sti_type.rb
+++ b/lib/gitlab/integrations/sti_type.rb
@@ -7,7 +7,7 @@ module Gitlab
Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Irker Jenkins Jira Mattermost
MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
- Prometheus Pushover Redmine Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao
+ Prometheus Pushover Redmine Shimo Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao
)).freeze
def self.namespaced_integrations
diff --git a/lib/gitlab/issues/rebalancing/state.rb b/lib/gitlab/issues/rebalancing/state.rb
index dce165a3489..3d3fd9419b2 100644
--- a/lib/gitlab/issues/rebalancing/state.rb
+++ b/lib/gitlab/issues/rebalancing/state.rb
@@ -4,6 +4,10 @@ module Gitlab
module Issues
module Rebalancing
class State
+ REDIS_KEY_PREFIX = "gitlab:issues-position-rebalances"
+ CONCURRENT_RUNNING_REBALANCES_KEY = "#{REDIS_KEY_PREFIX}:running_rebalances"
+ RECENTLY_FINISHED_REBALANCE_PREFIX = "#{REDIS_KEY_PREFIX}:recently_finished"
+
REDIS_EXPIRY_TIME = 10.days
MAX_NUMBER_OF_CONCURRENT_REBALANCES = 5
NAMESPACE = 1
@@ -21,25 +25,23 @@ module Gitlab
redis.multi do |multi|
# we trigger re-balance for namespaces(groups) or specific user project
value = "#{rebalanced_container_type}/#{rebalanced_container_id}"
- multi.sadd(concurrent_running_rebalances_key, value)
- multi.expire(concurrent_running_rebalances_key, REDIS_EXPIRY_TIME)
+ multi.sadd(CONCURRENT_RUNNING_REBALANCES_KEY, value)
+ multi.expire(CONCURRENT_RUNNING_REBALANCES_KEY, REDIS_EXPIRY_TIME)
end
end
end
def concurrent_running_rebalances_count
- with_redis { |redis| redis.scard(concurrent_running_rebalances_key).to_i }
+ with_redis { |redis| redis.scard(CONCURRENT_RUNNING_REBALANCES_KEY).to_i }
end
def rebalance_in_progress?
- all_rebalanced_containers = with_redis { |redis| redis.smembers(concurrent_running_rebalances_key) }
-
is_running = case rebalanced_container_type
when NAMESPACE
- namespace_ids = all_rebalanced_containers.map {|string| string.split("#{NAMESPACE}/").second.to_i }.compact
+ namespace_ids = self.class.current_rebalancing_containers.map {|string| string.split("#{NAMESPACE}/").second.to_i }.compact
namespace_ids.include?(root_namespace.id)
when PROJECT
- project_ids = all_rebalanced_containers.map {|string| string.split("#{PROJECT}/").second.to_i }.compact
+ project_ids = self.class.current_rebalancing_containers.map {|string| string.split("#{PROJECT}/").second.to_i }.compact
project_ids.include?(projects.take.id) # rubocop:disable CodeReuse/ActiveRecord
else
false
@@ -101,36 +103,63 @@ module Gitlab
multi.expire(issue_ids_key, REDIS_EXPIRY_TIME)
multi.expire(current_index_key, REDIS_EXPIRY_TIME)
multi.expire(current_project_key, REDIS_EXPIRY_TIME)
- multi.expire(concurrent_running_rebalances_key, REDIS_EXPIRY_TIME)
+ multi.expire(CONCURRENT_RUNNING_REBALANCES_KEY, REDIS_EXPIRY_TIME)
end
end
end
def cleanup_cache
+ value = "#{rebalanced_container_type}/#{rebalanced_container_id}"
+
with_redis do |redis|
redis.multi do |multi|
multi.del(issue_ids_key)
multi.del(current_index_key)
multi.del(current_project_key)
- multi.srem(concurrent_running_rebalances_key, "#{rebalanced_container_type}/#{rebalanced_container_id}")
+ multi.srem(CONCURRENT_RUNNING_REBALANCES_KEY, value)
+ multi.set(self.class.recently_finished_key(rebalanced_container_type, rebalanced_container_id), true, ex: 1.hour)
end
end
end
+ def self.rebalance_recently_finished?(project_id, namespace_id)
+ container_id = project_id || namespace_id
+ container_type = project_id.present? ? PROJECT : NAMESPACE
+
+ Gitlab::Redis::SharedState.with { |redis| redis.get(recently_finished_key(container_type, container_id)) }
+ end
+
+ def self.fetch_rebalancing_groups_and_projects
+ namespace_ids = []
+ project_ids = []
+
+ current_rebalancing_containers.each do |string|
+ container_type, container_id = string.split('/', 2).map(&:to_i)
+
+ if container_type == NAMESPACE
+ namespace_ids << container_id
+ elsif container_type == PROJECT
+ project_ids << container_id
+ end
+ end
+
+ [namespace_ids, project_ids]
+ end
+
private
+ def self.current_rebalancing_containers
+ Gitlab::Redis::SharedState.with { |redis| redis.smembers(CONCURRENT_RUNNING_REBALANCES_KEY) }
+ end
+
attr_accessor :root_namespace, :projects, :rebalanced_container_type, :rebalanced_container_id
def too_many_rebalances_running?
concurrent_running_rebalances_count <= MAX_NUMBER_OF_CONCURRENT_REBALANCES
end
- def redis_key_prefix
- "gitlab:issues-position-rebalances"
- end
-
def issue_ids_key
- "#{redis_key_prefix}:#{root_namespace.id}"
+ "#{REDIS_KEY_PREFIX}:#{root_namespace.id}"
end
def current_index_key
@@ -141,8 +170,8 @@ module Gitlab
"#{issue_ids_key}:current_project_id"
end
- def concurrent_running_rebalances_key
- "#{redis_key_prefix}:running_rebalances"
+ def self.recently_finished_key(container_type, container_id)
+ "#{RECENTLY_FINISHED_REBALANCE_PREFIX}:#{container_type}:#{container_id}"
end
def with_redis(&blk)
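
Members of the running-rebalances set are stored as "container_type/container_id" strings, and `fetch_rebalancing_groups_and_projects` splits them back into namespace and project id lists. A small sketch of that parsing (NAMESPACE is 1 per the constant above; the PROJECT value sits outside this hunk, so 2 is assumed here):

    NAMESPACE = 1
    PROJECT   = 2 # assumed for illustration; the real constant is defined outside this hunk

    members = ['1/10', '2/42']
    namespace_ids = []
    project_ids   = []

    members.each do |member|
      container_type, container_id = member.split('/', 2).map(&:to_i)
      (container_type == NAMESPACE ? namespace_ids : project_ids) << container_id
    end

    namespace_ids # => [10]
    project_ids   # => [42]
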
diff --git a/lib/gitlab/jira/http_client.rb b/lib/gitlab/jira/http_client.rb
index 13d3bb2b8dc..7abfe8e38e8 100644
--- a/lib/gitlab/jira/http_client.rb
+++ b/lib/gitlab/jira/http_client.rb
@@ -32,7 +32,6 @@ module Gitlab
request_params = { headers: headers }
request_params[:body] = body if body.present?
request_params[:headers][:Cookie] = get_cookies if options[:use_cookies]
- request_params[:timeout] = options[:read_timeout] if options[:read_timeout]
request_params[:base_uri] = uri.to_s
request_params.merge!(auth_params)
diff --git a/lib/gitlab/language_detection.rb b/lib/gitlab/language_detection.rb
index fc9fb5caa09..6f7fa9fe03b 100644
--- a/lib/gitlab/language_detection.rb
+++ b/lib/gitlab/language_detection.rb
@@ -18,7 +18,7 @@ module Gitlab
end
# Newly detected languages, returned in a structure accepted by
- # Gitlab::Database.main.bulk_insert
+ # ApplicationRecord.legacy_bulk_insert
def insertions(programming_languages)
lang_to_id = programming_languages.to_h { |p| [p.name, p.id] }
diff --git a/lib/gitlab/lfs_token.rb b/lib/gitlab/lfs_token.rb
index 2e8564b6e00..03655eb7237 100644
--- a/lib/gitlab/lfs_token.rb
+++ b/lib/gitlab/lfs_token.rb
@@ -96,24 +96,15 @@ module Gitlab
attr_reader :actor
def secret
- salt + key
- end
-
- def salt
case actor
when DeployKey, Key
- actor.fingerprint.delete(':').first(16)
+            # Since the fingerprint is based on the public key, take more bytes from attr_encrypted_db_key_base
+ actor.fingerprint.delete(':').first(16) + Settings.attr_encrypted_db_key_base_32
when User
# Take the last 16 characters as they're more unique than the first 16
- actor.id.to_s + actor.encrypted_password.last(16)
+ actor.id.to_s + actor.encrypted_password.last(16) + Settings.attr_encrypted_db_key_base.first(16)
end
end
-
- def key
- # Take 16 characters of attr_encrypted_db_key_base, as that's what the
- # cipher needs exactly
- Settings.attr_encrypted_db_key_base.first(16)
- end
end
end
end
diff --git a/lib/gitlab/lograge/custom_options.rb b/lib/gitlab/lograge/custom_options.rb
index 83fd74310d0..e6c9ba0773c 100644
--- a/lib/gitlab/lograge/custom_options.rb
+++ b/lib/gitlab/lograge/custom_options.rb
@@ -7,6 +7,8 @@ module Gitlab
LIMITED_ARRAY_SENTINEL = { key: 'truncated', value: '...' }.freeze
IGNORE_PARAMS = Set.new(%w(controller action format)).freeze
+ KNOWN_PAYLOAD_PARAMS = [:remote_ip, :user_id, :username, :ua, :queue_duration_s,
+ :etag_route, :request_urgency, :target_duration_s] + CLOUDFLARE_CUSTOM_HEADERS.values
def self.call(event)
params = event
@@ -14,24 +16,17 @@ module Gitlab
.each_with_object([]) { |(k, v), array| array << { key: k, value: v } unless IGNORE_PARAMS.include?(k) }
payload = {
time: Time.now.utc.iso8601(3),
- params: Gitlab::Utils::LogLimitedArray.log_limited_array(params, sentinel: LIMITED_ARRAY_SENTINEL),
- remote_ip: event.payload[:remote_ip],
- user_id: event.payload[:user_id],
- username: event.payload[:username],
- ua: event.payload[:ua]
+ params: Gitlab::Utils::LogLimitedArray.log_limited_array(params, sentinel: LIMITED_ARRAY_SENTINEL)
}
+
payload.merge!(event.payload[:metadata]) if event.payload[:metadata]
+ optional_payload_params = event.payload.slice(*KNOWN_PAYLOAD_PARAMS).compact
+ payload.merge!(optional_payload_params)
::Gitlab::InstrumentationHelper.add_instrumentation_data(payload)
- payload[:queue_duration_s] = event.payload[:queue_duration_s] if event.payload[:queue_duration_s]
- payload[:etag_route] = event.payload[:etag_route] if event.payload[:etag_route]
payload[Labkit::Correlation::CorrelationId::LOG_KEY] = event.payload[Labkit::Correlation::CorrelationId::LOG_KEY] || Labkit::Correlation::CorrelationId.current_id
- CLOUDFLARE_CUSTOM_HEADERS.each do |_, value|
- payload[value] = event.payload[value] if event.payload[value]
- end
-
# https://github.com/roidrage/lograge#logging-errors--exceptions
exception = event.payload[:exception_object]
diff --git a/lib/gitlab/merge_requests/merge_commit_message.rb b/lib/gitlab/merge_requests/merge_commit_message.rb
new file mode 100644
index 00000000000..2a6a7859b33
--- /dev/null
+++ b/lib/gitlab/merge_requests/merge_commit_message.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+module Gitlab
+ module MergeRequests
+ class MergeCommitMessage
+ def initialize(merge_request:)
+ @merge_request = merge_request
+ end
+
+ def message
+ return unless @merge_request.target_project.merge_commit_template.present?
+
+ message = @merge_request.target_project.merge_commit_template
+ message = message.delete("\r")
+
+        # Remove placeholders that correspond to empty values and are the last word on their line,
+        # along with all whitespace characters preceding them.
+        # This recreates the previous default merge commit message behaviour: we skipped the newline character
+        # before an empty description and before closed issues when none were present.
+ PLACEHOLDERS.each do |key, value|
+ unless value.call(merge_request).present?
+ message = message.gsub(BLANK_PLACEHOLDERS_REGEXES[key], '')
+ end
+ end
+
+ Gitlab::StringPlaceholderReplacer
+ .replace_string_placeholders(message, PLACEHOLDERS_REGEX) do |key|
+ PLACEHOLDERS[key].call(merge_request)
+ end
+ end
+
+ private
+
+ attr_reader :merge_request
+
+ PLACEHOLDERS = {
+ 'source_branch' => ->(merge_request) { merge_request.source_branch.to_s },
+ 'target_branch' => ->(merge_request) { merge_request.target_branch.to_s },
+ 'title' => ->(merge_request) { merge_request.title },
+ 'issues' => ->(merge_request) do
+ return "" if merge_request.visible_closing_issues_for.blank?
+
+ closes_issues_references = merge_request.visible_closing_issues_for.map do |issue|
+ issue.to_reference(merge_request.target_project)
+ end
+ "Closes #{closes_issues_references.to_sentence}"
+ end,
+ 'description' => ->(merge_request) { merge_request.description.presence || '' },
+ 'reference' => ->(merge_request) { merge_request.to_reference(full: true) }
+ }.freeze
+
+ PLACEHOLDERS_REGEX = Regexp.union(PLACEHOLDERS.keys.map do |key|
+ Regexp.new(Regexp.escape(key))
+ end).freeze
+
+ BLANK_PLACEHOLDERS_REGEXES = (PLACEHOLDERS.map do |key, value|
+ [key, Regexp.new("[\n\r]+%{#{Regexp.escape(key)}}$")]
+ end).to_h.freeze
+ end
+ end
+end
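
Placeholders that resolve to blank values are removed together with the whitespace before them, and the remaining ones are substituted from a lookup table. A self-contained sketch of that idea (plain string handling, not the GitLab helpers):

    placeholders = {
      'source_branch' => 'feature/login',
      'title'         => 'Add login page',
      'description'   => '' # empty, so its line should disappear
    }

    template = "Merge branch '%{source_branch}'\n\n%{title}\n\n%{description}"

    # drop placeholders whose value is blank, plus the newlines before them
    placeholders.each do |key, value|
      template = template.gsub(/[\n\r]+%\{#{Regexp.escape(key)}\}\z/, '') if value.empty?
    end

    message = template.gsub(/%\{(\w+)\}/) { placeholders[Regexp.last_match(1)] }
    puts message
    # Merge branch 'feature/login'
    #
    # Add login page
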
diff --git a/lib/gitlab/metrics.rb b/lib/gitlab/metrics.rb
index 4c4942c12d5..6d7ecb53ec3 100644
--- a/lib/gitlab/metrics.rb
+++ b/lib/gitlab/metrics.rb
@@ -29,7 +29,7 @@ module Gitlab
# Allow access from other metrics related middlewares
def self.current_transaction
- Transaction.current
+ WebTransaction.current || BackgroundTransaction.current
end
# Returns the prefix to use for the name of a series.
diff --git a/lib/gitlab/metrics/background_transaction.rb b/lib/gitlab/metrics/background_transaction.rb
index a1fabe75a97..54095461dd4 100644
--- a/lib/gitlab/metrics/background_transaction.rb
+++ b/lib/gitlab/metrics/background_transaction.rb
@@ -2,14 +2,17 @@
module Gitlab
module Metrics
+ # Exclusive transaction-type metrics for background jobs (Sidekiq). One
+ # instance of this class is created for each job going through the Sidekiq
+ # metric middleware. Any metrics dispatched with this instance include
+ # metadata such as endpoint_id, queue, and feature category.
class BackgroundTransaction < Transaction
- # Separate web transaction instance and background transaction instance
- BACKGROUND_THREAD_KEY = :_gitlab_metrics_background_transaction
- BACKGROUND_BASE_LABEL_KEYS = %i(endpoint_id feature_category).freeze
+ THREAD_KEY = :_gitlab_metrics_background_transaction
+ BASE_LABEL_KEYS = %i(queue endpoint_id feature_category).freeze
class << self
def current
- Thread.current[BACKGROUND_THREAD_KEY]
+ Thread.current[THREAD_KEY]
end
def prometheus_metric(name, type, &block)
@@ -19,17 +22,17 @@ module Gitlab
evaluate(&block)
# always filter sensitive labels and merge with base ones
- label_keys BACKGROUND_BASE_LABEL_KEYS | (label_keys - ::Gitlab::Metrics::Transaction::FILTERED_LABEL_KEYS)
+ label_keys BASE_LABEL_KEYS | (label_keys - ::Gitlab::Metrics::Transaction::FILTERED_LABEL_KEYS)
end
end
end
def run
- Thread.current[BACKGROUND_THREAD_KEY] = self
+ Thread.current[THREAD_KEY] = self
yield
ensure
- Thread.current[BACKGROUND_THREAD_KEY] = nil
+ Thread.current[THREAD_KEY] = nil
end
def labels
diff --git a/lib/gitlab/metrics/methods.rb b/lib/gitlab/metrics/methods.rb
index 8ddd76ad7ae..dc9a7ed1312 100644
--- a/lib/gitlab/metrics/methods.rb
+++ b/lib/gitlab/metrics/methods.rb
@@ -13,8 +13,12 @@ module Gitlab
end
class_methods do
- def reload_metric!(name)
- @@_metrics_provider_cache.delete(name)
+ def reload_metric!(name = nil)
+ if name.nil?
+ @@_metrics_provider_cache = {}
+ else
+ @@_metrics_provider_cache.delete(name)
+ end
end
private
diff --git a/lib/gitlab/metrics/rails_slis.rb b/lib/gitlab/metrics/rails_slis.rb
index 69e0c1e9fde..8c40c0ad441 100644
--- a/lib/gitlab/metrics/rails_slis.rb
+++ b/lib/gitlab/metrics/rails_slis.rb
@@ -4,23 +4,32 @@ module Gitlab
module Metrics
module RailsSlis
class << self
- def request_apdex_counters_enabled?
- Feature.enabled?(:request_apdex_counters)
- end
-
def initialize_request_slis_if_needed!
- return unless request_apdex_counters_enabled?
- return if Gitlab::Metrics::Sli.initialized?(:rails_request_apdex)
-
- Gitlab::Metrics::Sli.initialize_sli(:rails_request_apdex, possible_request_labels)
+ Gitlab::Metrics::Sli.initialize_sli(:rails_request_apdex, possible_request_labels) unless Gitlab::Metrics::Sli.initialized?(:rails_request_apdex)
+ Gitlab::Metrics::Sli.initialize_sli(:graphql_query_apdex, possible_graphql_query_labels) unless Gitlab::Metrics::Sli.initialized?(:graphql_query_apdex)
end
def request_apdex
Gitlab::Metrics::Sli[:rails_request_apdex]
end
+ def graphql_query_apdex
+ Gitlab::Metrics::Sli[:graphql_query_apdex]
+ end
+
private
+ def possible_graphql_query_labels
+ ::Gitlab::Graphql::KnownOperations.default.operations.map do |op|
+ {
+ endpoint_id: op.to_caller_id,
+ # We'll be able to correlate feature_category with https://gitlab.com/gitlab-org/gitlab/-/issues/328535
+ feature_category: nil,
+ query_urgency: op.query_urgency.name
+ }
+ end
+ end
+
def possible_request_labels
possible_controller_labels + possible_api_labels
end
@@ -30,10 +39,12 @@ module Gitlab
endpoint_id = API::Base.endpoint_id_for_route(route)
route_class = route.app.options[:for]
feature_category = route_class.feature_category_for_app(route.app)
+ request_urgency = route_class.urgency_for_app(route.app)
{
endpoint_id: endpoint_id,
- feature_category: feature_category
+ feature_category: feature_category,
+ request_urgency: request_urgency.name
}
end
end
@@ -42,7 +53,8 @@ module Gitlab
Gitlab::RequestEndpoints.all_controller_actions.map do |controller, action|
{
endpoint_id: controller.endpoint_id_for_action(action),
- feature_category: controller.feature_category_for_action(action)
+ feature_category: controller.feature_category_for_action(action),
+ request_urgency: controller.urgency_for_action(action).name
}
end
end
diff --git a/lib/gitlab/metrics/requests_rack_middleware.rb b/lib/gitlab/metrics/requests_rack_middleware.rb
index 3a0e34d5615..c143a7f5a1b 100644
--- a/lib/gitlab/metrics/requests_rack_middleware.rb
+++ b/lib/gitlab/metrics/requests_rack_middleware.rb
@@ -79,7 +79,7 @@ module Gitlab
if !health_endpoint && ::Gitlab::Metrics.record_duration_for_status?(status)
self.class.http_request_duration_seconds.observe({ method: method }, elapsed)
- record_apdex_if_needed(env, elapsed)
+ record_apdex(env, elapsed)
end
[status, headers, body]
@@ -113,12 +113,12 @@ module Gitlab
::Gitlab::ApplicationContext.current_context_attribute(:caller_id)
end
- def record_apdex_if_needed(env, elapsed)
- return unless Gitlab::Metrics::RailsSlis.request_apdex_counters_enabled?
+ def record_apdex(env, elapsed)
+ urgency = urgency_for_env(env)
Gitlab::Metrics::RailsSlis.request_apdex.increment(
- labels: labels_from_context,
- success: satisfactory?(env, elapsed)
+ labels: labels_from_context.merge(request_urgency: urgency.name),
+ success: elapsed < urgency.duration
)
end
@@ -129,17 +129,15 @@ module Gitlab
}
end
- def satisfactory?(env, elapsed)
- target =
+ def urgency_for_env(env)
+ endpoint_urgency =
if env['api.endpoint'].present?
env['api.endpoint'].options[:for].try(:urgency_for_app, env['api.endpoint'])
elsif env['action_controller.instance'].present? && env['action_controller.instance'].respond_to?(:urgency)
env['action_controller.instance'].urgency
end
- target ||= Gitlab::EndpointAttributes::DEFAULT_URGENCY
-
- elapsed < target.duration
+ endpoint_urgency || Gitlab::EndpointAttributes::DEFAULT_URGENCY
end
end
end
diff --git a/lib/gitlab/metrics/samplers/action_cable_sampler.rb b/lib/gitlab/metrics/samplers/action_cable_sampler.rb
index 043d2ae84cc..adce3030d0d 100644
--- a/lib/gitlab/metrics/samplers/action_cable_sampler.rb
+++ b/lib/gitlab/metrics/samplers/action_cable_sampler.rb
@@ -39,23 +39,14 @@ module Gitlab
def sample
pool = @action_cable.worker_pool.executor
- labels = {
- server_mode: server_mode
- }
-
- metrics[:active_connections].set(labels, @action_cable.connections.size)
- metrics[:pool_min_size].set(labels, pool.min_length)
- metrics[:pool_max_size].set(labels, pool.max_length)
- metrics[:pool_current_size].set(labels, pool.length)
- metrics[:pool_largest_size].set(labels, pool.largest_length)
- metrics[:pool_completed_tasks].set(labels, pool.completed_task_count)
- metrics[:pool_pending_tasks].set(labels, pool.queue_length)
- end
-
- private
- def server_mode
- Gitlab::ActionCable::Config.in_app? ? 'in-app' : 'standalone'
+ metrics[:active_connections].set({}, @action_cable.connections.size)
+ metrics[:pool_min_size].set({}, pool.min_length)
+ metrics[:pool_max_size].set({}, pool.max_length)
+ metrics[:pool_current_size].set({}, pool.length)
+ metrics[:pool_largest_size].set({}, pool.largest_length)
+ metrics[:pool_completed_tasks].set({}, pool.completed_task_count)
+ metrics[:pool_pending_tasks].set({}, pool.queue_length)
end
end
end
diff --git a/lib/gitlab/metrics/subscribers/action_view.rb b/lib/gitlab/metrics/subscribers/action_view.rb
index fa129025bfe..bc9032a6942 100644
--- a/lib/gitlab/metrics/subscribers/action_view.rb
+++ b/lib/gitlab/metrics/subscribers/action_view.rb
@@ -40,7 +40,7 @@ module Gitlab
end
def current_transaction
- ::Gitlab::Metrics::Transaction.current
+ ::Gitlab::Metrics::WebTransaction.current
end
end
end
diff --git a/lib/gitlab/metrics/subscribers/external_http.rb b/lib/gitlab/metrics/subscribers/external_http.rb
index 60a1b084345..ff8654a2cec 100644
--- a/lib/gitlab/metrics/subscribers/external_http.rb
+++ b/lib/gitlab/metrics/subscribers/external_http.rb
@@ -43,7 +43,7 @@ module Gitlab
private
def current_transaction
- ::Gitlab::Metrics::Transaction.current
+ ::Gitlab::Metrics::WebTransaction.current || ::Gitlab::Metrics::BackgroundTransaction.current
end
def add_to_detail_store(start, payload)
diff --git a/lib/gitlab/metrics/subscribers/rails_cache.rb b/lib/gitlab/metrics/subscribers/rails_cache.rb
index 45344e79796..b5e087d107b 100644
--- a/lib/gitlab/metrics/subscribers/rails_cache.rb
+++ b/lib/gitlab/metrics/subscribers/rails_cache.rb
@@ -65,7 +65,7 @@ module Gitlab
private
def current_transaction
- ::Gitlab::Metrics::Transaction.current
+ ::Gitlab::Metrics::WebTransaction.current
end
def metric_cache_operation_duration_seconds
diff --git a/lib/gitlab/metrics/transaction.rb b/lib/gitlab/metrics/transaction.rb
index 97cc8bed564..56a310548a7 100644
--- a/lib/gitlab/metrics/transaction.rb
+++ b/lib/gitlab/metrics/transaction.rb
@@ -6,35 +6,14 @@ module Gitlab
class Transaction
include Gitlab::Metrics::Methods
- # base label keys shared among all transactions
- BASE_LABEL_KEYS = %i(controller action feature_category).freeze
# labels that potentially contain sensitive information and will be filtered
FILTERED_LABEL_KEYS = %i(branch path).freeze
- THREAD_KEY = :_gitlab_metrics_transaction
-
# The series to store events (e.g. Git pushes) in.
EVENT_SERIES = 'events'
attr_reader :method
- class << self
- def current
- Thread.current[THREAD_KEY]
- end
-
- def prometheus_metric(name, type, &block)
- fetch_metric(type, name) do
- # set default metric options
- docstring "#{name.to_s.humanize} #{type}"
-
- evaluate(&block)
- # always filter sensitive labels and merge with base ones
- label_keys BASE_LABEL_KEYS | (label_keys - FILTERED_LABEL_KEYS)
- end
- end
- end
-
def initialize
@methods = {}
end
@@ -126,10 +105,6 @@ module Gitlab
histogram.observe(filter_labels(labels), value)
end
- def labels
- BASE_LABEL_KEYS.product([nil]).to_h
- end
-
def filter_labels(labels)
labels.empty? ? self.labels : labels.without(*FILTERED_LABEL_KEYS).merge(self.labels)
end
diff --git a/lib/gitlab/metrics/web_transaction.rb b/lib/gitlab/metrics/web_transaction.rb
index 544c142f7bb..fcfa86734e8 100644
--- a/lib/gitlab/metrics/web_transaction.rb
+++ b/lib/gitlab/metrics/web_transaction.rb
@@ -2,12 +2,37 @@
module Gitlab
module Metrics
+ # Exclusive transaction-type metrics for web servers (including Web/Api/Git
+ # fleet). One instance of this class is created for each request going
+ # through the Rack metric middleware. Any metrics dispatched with this
+ # instance include metadata such as controller, action, feature category,
+ # etc.
class WebTransaction < Transaction
+ THREAD_KEY = :_gitlab_metrics_transaction
+ BASE_LABEL_KEYS = %i(controller action feature_category).freeze
+
CONTROLLER_KEY = 'action_controller.instance'
ENDPOINT_KEY = 'api.endpoint'
ALLOWED_SUFFIXES = Set.new(%w[json js atom rss xml zip])
SMALL_BUCKETS = [0.1, 0.25, 0.5, 1.0, 2.5, 5.0].freeze
+ class << self
+ def current
+ Thread.current[THREAD_KEY]
+ end
+
+ def prometheus_metric(name, type, &block)
+ fetch_metric(type, name) do
+ # set default metric options
+ docstring "#{name.to_s.humanize} #{type}"
+
+ evaluate(&block)
+ # always filter sensitive labels and merge with base ones
+ label_keys BASE_LABEL_KEYS | (label_keys - ::Gitlab::Metrics::Transaction::FILTERED_LABEL_KEYS)
+ end
+ end
+ end
+
def initialize(env)
super()
@env = env
diff --git a/lib/gitlab/middleware/compressed_json.rb b/lib/gitlab/middleware/compressed_json.rb
new file mode 100644
index 00000000000..ef6e0db5673
--- /dev/null
+++ b/lib/gitlab/middleware/compressed_json.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Middleware
+ class CompressedJson
+ COLLECTOR_PATH = '/api/v4/error_tracking/collector'
+ MAXIMUM_BODY_SIZE = 200.kilobytes.to_i
+
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ if compressed_et_request?(env)
+ input = extract(env['rack.input'])
+
+ if input.length > MAXIMUM_BODY_SIZE
+ return too_large
+ end
+
+ env.delete('HTTP_CONTENT_ENCODING')
+ env['CONTENT_LENGTH'] = input.length
+ env['rack.input'] = StringIO.new(input)
+ end
+
+ @app.call(env)
+ end
+
+ def compressed_et_request?(env)
+ post_request?(env) &&
+ gzip_encoding?(env) &&
+ match_content_type?(env) &&
+ match_path?(env)
+ end
+
+ def too_large
+ [413, { 'Content-Type' => 'text/plain' }, ['Payload Too Large']]
+ end
+
+ def relative_url
+ File.join('', Gitlab.config.gitlab.relative_url_root).chomp('/')
+ end
+
+ def extract(input)
+ Zlib::GzipReader.new(input).read(MAXIMUM_BODY_SIZE + 1)
+ end
+
+ def post_request?(env)
+ env['REQUEST_METHOD'] == 'POST'
+ end
+
+ def gzip_encoding?(env)
+ env['HTTP_CONTENT_ENCODING'] == 'gzip'
+ end
+
+ def match_content_type?(env)
+ env['CONTENT_TYPE'] == 'application/json' ||
+ env['CONTENT_TYPE'] == 'application/x-sentry-envelope'
+ end
+
+ def match_path?(env)
+ env['PATH_INFO'].start_with?((File.join(relative_url, COLLECTOR_PATH)))
+ end
+ end
+ end
+end
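
The middleware inflates the gzipped body in place but reads at most one byte past the cap, so oversized payloads can be rejected with 413 without decompressing everything. A self-contained sketch of that read-with-limit idea:

    require 'zlib'
    require 'stringio'

    MAX_BYTES = 200 * 1024 # mirrors MAXIMUM_BODY_SIZE above

    compressed = StringIO.new(Zlib.gzip('{"message":"hello"}'))
    body = Zlib::GzipReader.new(compressed).read(MAX_BYTES + 1)

    raise 'Payload Too Large' if body.length > MAX_BYTES

    puts body # => {"message":"hello"}
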
diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb
index a1a0356ff58..bfa4e4cf5f8 100644
--- a/lib/gitlab/middleware/go.rb
+++ b/lib/gitlab/middleware/go.rb
@@ -27,6 +27,8 @@ module Gitlab
path: request.fullpath
)
Rack::Response.new('', 403).finish
+ rescue Gitlab::Auth::MissingPersonalAccessTokenError
+ Rack::Response.new('', 401).finish
end
private
diff --git a/lib/gitlab/middleware/query_analyzer.rb b/lib/gitlab/middleware/query_analyzer.rb
new file mode 100644
index 00000000000..8d63c644a69
--- /dev/null
+++ b/lib/gitlab/middleware/query_analyzer.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Middleware
+ class QueryAnalyzer
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ ::Gitlab::Database::QueryAnalyzer.instance.within { @app.call(env) }
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/middleware/release_env.rb b/lib/gitlab/middleware/release_env.rb
index 0719fb2e8c6..2439e873e0b 100644
--- a/lib/gitlab/middleware/release_env.rb
+++ b/lib/gitlab/middleware/release_env.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -14,3 +15,5 @@ module Gitlab
end
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/pagination/gitaly_keyset_pager.rb b/lib/gitlab/pagination/gitaly_keyset_pager.rb
index a16bf7a379c..99a3145104a 100644
--- a/lib/gitlab/pagination/gitaly_keyset_pager.rb
+++ b/lib/gitlab/pagination/gitaly_keyset_pager.rb
@@ -30,6 +30,8 @@ module Gitlab
if finder.is_a?(BranchesFinder)
Feature.enabled?(:branch_list_keyset_pagination, project, default_enabled: :yaml)
+ elsif finder.is_a?(TagsFinder)
+ Feature.enabled?(:tag_list_keyset_pagination, project, default_enabled: :yaml)
elsif finder.is_a?(::Repositories::TreeFinder)
Feature.enabled?(:repository_tree_gitaly_pagination, project, default_enabled: :yaml)
else
@@ -42,6 +44,8 @@ module Gitlab
if finder.is_a?(BranchesFinder)
Feature.enabled?(:branch_list_keyset_pagination, project, default_enabled: :yaml)
+ elsif finder.is_a?(TagsFinder)
+ Feature.enabled?(:tag_list_keyset_pagination, project, default_enabled: :yaml)
elsif finder.is_a?(::Repositories::TreeFinder)
Feature.enabled?(:repository_tree_gitaly_pagination, project, default_enabled: :yaml)
else
diff --git a/lib/gitlab/patch/sidekiq_client.rb b/lib/gitlab/patch/sidekiq_client.rb
new file mode 100644
index 00000000000..2de13560cce
--- /dev/null
+++ b/lib/gitlab/patch/sidekiq_client.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Patch
+ module SidekiqClient
+ private
+
+ # This is a copy of https://github.com/mperham/sidekiq/blob/v6.2.2/lib/sidekiq/client.rb#L187-L194
+ # but using `conn.pipelined` instead of `conn.multi`. The multi call isn't needed here because in
+ # the case of scheduled jobs, only one Redis call is made. For other jobs, we don't really need
+ # the commands to be atomic.
+ def raw_push(payloads)
+ @redis_pool.with do |conn| # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ conn.pipelined do
+ atomic_push(conn, payloads)
+ end
+ end
+ true
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/patch/sidekiq_cron_poller.rb b/lib/gitlab/patch/sidekiq_cron_poller.rb
new file mode 100644
index 00000000000..56ca24c68f5
--- /dev/null
+++ b/lib/gitlab/patch/sidekiq_cron_poller.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Patch
+ module SidekiqCronPoller
+ def enqueue
+ Rails.application.reloader.wrap do
+ ::Gitlab::WithRequestStore.with_request_store do
+ super
+ ensure
+ ::Gitlab::Database::LoadBalancing.release_hosts
+ end
+ end
+ end
+ end
+ end
+end
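
Neither patch does anything on its own; a hypothetical wiring sketch follows (the actual initializer is outside this diff, and the target class for the cron poller is an assumption):

    # Illustrative only — prepending makes the overridden #raw_push and #enqueue
    # take precedence over the upstream Sidekiq implementations.
    Sidekiq::Client.prepend(Gitlab::Patch::SidekiqClient)
    Sidekiq::Cron::Poller.prepend(Gitlab::Patch::SidekiqCronPoller)
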
diff --git a/lib/gitlab/project_template.rb b/lib/gitlab/project_template.rb
index 8875e6320c7..d53b11fe98c 100644
--- a/lib/gitlab/project_template.rb
+++ b/lib/gitlab/project_template.rb
@@ -66,6 +66,7 @@ module Gitlab
ProjectTemplate.new('nfhexo', 'Netlify/Hexo', _('A Hexo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features'), 'https://gitlab.com/pages/nfhexo', 'illustrations/logos/netlify.svg'),
ProjectTemplate.new('salesforcedx', 'SalesforceDX', _('A project boilerplate for Salesforce App development with Salesforce Developer tools'), 'https://gitlab.com/gitlab-org/project-templates/salesforcedx'),
ProjectTemplate.new('serverless_framework', 'Serverless Framework/JS', _('A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages'), 'https://gitlab.com/gitlab-org/project-templates/serverless-framework', 'illustrations/logos/serverless_framework.svg'),
+ ProjectTemplate.new('tencent_serverless_framework', 'Tencent Serverless Framework/NextjsSSR', _('A project boilerplate for Tencent Serverless Framework that uses Next.js SSR'), 'https://gitlab.com/gitlab-org/project-templates/nextjsssr_demo', 'illustrations/logos/tencent_serverless_framework.svg'),
ProjectTemplate.new('jsonnet', 'Jsonnet for Dynamic Child Pipelines', _('An example showing how to use Jsonnet with GitLab dynamic child pipelines'), 'https://gitlab.com/gitlab-org/project-templates/jsonnet'),
ProjectTemplate.new('cluster_management', 'GitLab Cluster Management', _('An example project for managing Kubernetes clusters integrated with GitLab'), 'https://gitlab.com/gitlab-org/project-templates/cluster-management'),
ProjectTemplate.new('kotlin_native_linux', 'Kotlin Native Linux', _('A basic template for developing Linux programs using Kotlin Native'), 'https://gitlab.com/gitlab-org/project-templates/kotlin-native-linux')
diff --git a/lib/gitlab/prometheus/queries/validate_query.rb b/lib/gitlab/prometheus/queries/validate_query.rb
index 1f55f3e9768..160db7d44bc 100644
--- a/lib/gitlab/prometheus/queries/validate_query.rb
+++ b/lib/gitlab/prometheus/queries/validate_query.rb
@@ -7,7 +7,7 @@ module Gitlab
def query(query)
client_query(query)
{ valid: true }
- rescue Gitlab::PrometheusClient::QueryError, Gitlab::HTTP::BlockedUrlError => ex
+ rescue Gitlab::PrometheusClient::QueryError, Gitlab::PrometheusClient::ConnectionError => ex
{ valid: false, error: ex.message }
end
diff --git a/lib/gitlab/prometheus_client.rb b/lib/gitlab/prometheus_client.rb
index 8182dbad4f8..dda28ffdf90 100644
--- a/lib/gitlab/prometheus_client.rb
+++ b/lib/gitlab/prometheus_client.rb
@@ -151,12 +151,8 @@ module Gitlab
def get(path, args)
Gitlab::HTTP.get(path, { query: args }.merge(http_options) )
- rescue SocketError
- raise PrometheusClient::ConnectionError, "Can't connect to #{api_url}"
- rescue OpenSSL::SSL::SSLError
- raise PrometheusClient::ConnectionError, "#{api_url} contains invalid SSL data"
- rescue Errno::ECONNREFUSED
- raise PrometheusClient::ConnectionError, 'Connection refused'
+ rescue *Gitlab::HTTP::HTTP_ERRORS => e
+ raise PrometheusClient::ConnectionError, e.message
end
def handle_management_api_response(response)
diff --git a/lib/gitlab/quick_actions/issuable_actions.rb b/lib/gitlab/quick_actions/issuable_actions.rb
index cf5c9296d8c..4bac0643a91 100644
--- a/lib/gitlab/quick_actions/issuable_actions.rb
+++ b/lib/gitlab/quick_actions/issuable_actions.rb
@@ -84,8 +84,7 @@ module Gitlab
params '~label1 ~"label 2"'
types Issuable
condition do
- parent &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", parent) &&
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target) &&
find_labels.any?
end
command :label do |labels_param|
@@ -107,7 +106,7 @@ module Gitlab
condition do
quick_action_target.persisted? &&
quick_action_target.labels.any? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", parent)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
command :unlabel, :remove_label do |labels_param = nil|
if labels_param.present?
@@ -139,7 +138,7 @@ module Gitlab
condition do
quick_action_target.persisted? &&
quick_action_target.labels.any? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", parent)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
command :relabel do |labels_param|
run_label_command(labels: find_labels(labels_param), command: :relabel, updates_key: :label_ids)
diff --git a/lib/gitlab/quick_actions/issue_actions.rb b/lib/gitlab/quick_actions/issue_actions.rb
index c5cf3262039..a55ead519e2 100644
--- a/lib/gitlab/quick_actions/issue_actions.rb
+++ b/lib/gitlab/quick_actions/issue_actions.rb
@@ -19,7 +19,7 @@ module Gitlab
types Issue
condition do
quick_action_target.respond_to?(:due_date) &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
parse_params do |due_date_param|
Chronic.parse(due_date_param).try(:to_date)
@@ -40,7 +40,7 @@ module Gitlab
quick_action_target.persisted? &&
quick_action_target.respond_to?(:due_date) &&
quick_action_target.due_date? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
command :remove_due_date do
@updates[:due_date] = nil
@@ -54,7 +54,7 @@ module Gitlab
params '~"Target column"'
types Issue
condition do
- current_user.can?(:"update_#{quick_action_target.to_ability_name}", quick_action_target) &&
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target) &&
quick_action_target.project.boards.count == 1
end
command :board_move do |target_list_name|
@@ -86,7 +86,7 @@ module Gitlab
types Issue
condition do
quick_action_target.persisted? &&
- current_user.can?(:"update_#{quick_action_target.to_ability_name}", quick_action_target)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
command :duplicate do |duplicate_param|
canonical_issue = extract_references(duplicate_param, :issue).first
@@ -172,7 +172,7 @@ module Gitlab
condition do
quick_action_target.issue_type_supports?(:confidentiality) &&
!quick_action_target.confidential? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", quick_action_target)
+ current_user.can?(:set_confidentiality, quick_action_target)
end
command :confidential do
@updates[:confidential] = true
@@ -264,6 +264,27 @@ module Gitlab
end
end
+ desc _('Promote issue to incident')
+ explanation _('Promotes issue to incident')
+ types Issue
+ condition do
+ quick_action_target.persisted? &&
+ !quick_action_target.incident? &&
+ current_user.can?(:update_issue, quick_action_target)
+ end
+ command :promote_to_incident do
+ issue = ::Issues::UpdateService
+ .new(project: quick_action_target.project, current_user: current_user, params: { issue_type: 'incident' })
+ .execute(quick_action_target)
+
+ @execution_message[:promote_to_incident] =
+ if issue.incident?
+ _('Issue has been promoted to incident')
+ else
+ _('Failed to promote issue to incident')
+ end
+ end
+
private
def zoom_link_service
diff --git a/lib/gitlab/quick_actions/issue_and_merge_request_actions.rb b/lib/gitlab/quick_actions/issue_and_merge_request_actions.rb
index b53fdd60606..4a75fa0a571 100644
--- a/lib/gitlab/quick_actions/issue_and_merge_request_actions.rb
+++ b/lib/gitlab/quick_actions/issue_and_merge_request_actions.rb
@@ -26,7 +26,7 @@ module Gitlab
end
types Issue, MergeRequest
condition do
- quick_action_target.supports_assignee? && current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project)
+ quick_action_target.supports_assignee? && current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
parse_params do |assignee_param|
extract_users(assignee_param)
@@ -66,7 +66,7 @@ module Gitlab
condition do
quick_action_target.persisted? &&
quick_action_target.assignees.any? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
parse_params do |unassign_param|
# When multiple users are assigned, all will be unassigned if multiple assignees are no longer allowed
@@ -92,7 +92,7 @@ module Gitlab
types Issue, MergeRequest
condition do
quick_action_target.supports_milestone? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project) &&
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target) &&
find_milestones(project, state: 'active').any?
end
parse_params do |milestone_param|
@@ -115,7 +115,7 @@ module Gitlab
quick_action_target.persisted? &&
quick_action_target.milestone_id? &&
quick_action_target.supports_milestone? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
command :remove_milestone do
@updates[:milestone_id] = nil
@@ -128,7 +128,7 @@ module Gitlab
params '#issue | !merge_request'
types Issue, MergeRequest
condition do
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", quick_action_target)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
parse_params do |issuable_param|
extract_references(issuable_param, :issue).first ||
@@ -225,7 +225,7 @@ module Gitlab
condition do
quick_action_target.persisted? &&
!quick_action_target.discussion_locked? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", quick_action_target)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
command :lock do
@updates[:discussion_locked] = true
@@ -238,7 +238,7 @@ module Gitlab
condition do
quick_action_target.persisted? &&
quick_action_target.discussion_locked? &&
- current_user.can?(:"admin_#{quick_action_target.to_ability_name}", quick_action_target)
+ current_user.can?(:"set_#{quick_action_target.to_ability_name}_metadata", quick_action_target)
end
command :unlock do
@updates[:discussion_locked] = false
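
Taken together, the quick-action changes in this diff replace the `:admin_*` check against the parent project with a `:set_*_metadata` check against the target itself; for an issue, the interpolated ability works out to roughly:

    # Illustrative: Issue#to_ability_name is assumed to return "issue" here.
    current_user.can?(:set_issue_metadata, issue)   # previously: can?(:admin_issue, project)
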
diff --git a/lib/gitlab/redis/hll.rb b/lib/gitlab/redis/hll.rb
index 0d04545688b..4d1855e4637 100644
--- a/lib/gitlab/redis/hll.rb
+++ b/lib/gitlab/redis/hll.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -51,3 +52,5 @@ module Gitlab
end
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/redis/multi_store.rb b/lib/gitlab/redis/multi_store.rb
new file mode 100644
index 00000000000..f930a0040bc
--- /dev/null
+++ b/lib/gitlab/redis/multi_store.rb
@@ -0,0 +1,215 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Redis
+ class MultiStore
+ include Gitlab::Utils::StrongMemoize
+
+ class ReadFromPrimaryError < StandardError
+ def message
+ 'Value not found on the redis primary store. Read from the redis secondary store successful.'
+ end
+ end
+ class MethodMissingError < StandardError
+ def message
+ 'Method missing. Falling back to execute method on the redis secondary store.'
+ end
+ end
+
+ attr_reader :primary_store, :secondary_store, :instance_name
+
+ FAILED_TO_READ_ERROR_MESSAGE = 'Failed to read from the redis primary_store.'
+ FAILED_TO_WRITE_ERROR_MESSAGE = 'Failed to write to the redis primary_store.'
+
+ READ_COMMANDS = %i(
+ get
+ mget
+ smembers
+ scard
+ ).freeze
+
+ WRITE_COMMANDS = %i(
+ set
+ setnx
+ setex
+ sadd
+ srem
+ del
+ pipelined
+ flushdb
+ ).freeze
+
+ def initialize(primary_store, secondary_store, instance_name = nil)
+ @primary_store = primary_store
+ @secondary_store = secondary_store
+ @instance_name = instance_name
+
+ validate_stores!
+ end
+
+ READ_COMMANDS.each do |name|
+ define_method(name) do |*args, &block|
+ if multi_store_enabled?
+ read_command(name, *args, &block)
+ else
+ secondary_store.send(name, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+
+ WRITE_COMMANDS.each do |name|
+ define_method(name) do |*args, &block|
+ if multi_store_enabled?
+ write_command(name, *args, &block)
+ else
+ secondary_store.send(name, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+
+ def method_missing(...)
+ return @instance.send(...) if @instance # rubocop:disable GitlabSecurity/PublicSend
+
+ log_method_missing(...)
+
+ secondary_store.send(...) # rubocop:disable GitlabSecurity/PublicSend
+ end
+
+ def respond_to_missing?(command_name, include_private = false)
+ true
+ end
+
+      # This is needed because Redis::Rack::Connection requires Redis::Store
+ # https://github.com/redis-store/redis-rack/blob/a833086ba494083b6a384a1a4e58b36573a9165d/lib/redis/rack/connection.rb#L15
+ # Done similarly in https://github.com/lsegal/yard/blob/main/lib/yard/templates/template.rb#L122
+ def is_a?(klass)
+ return true if klass == secondary_store.class
+
+ super(klass)
+ end
+ alias_method :kind_of?, :is_a?
+
+ def to_s
+ if multi_store_enabled?
+ primary_store.to_s
+ else
+ secondary_store.to_s
+ end
+ end
+
+ private
+
+ def log_method_missing(command_name, *_args)
+ log_error(MethodMissingError.new, command_name)
+ increment_method_missing_count(command_name)
+ end
+
+ def read_command(command_name, *args, &block)
+ if @instance
+ send_command(@instance, command_name, *args, &block)
+ else
+ read_one_with_fallback(command_name, *args, &block)
+ end
+ end
+
+ def write_command(command_name, *args, &block)
+ if @instance
+ send_command(@instance, command_name, *args, &block)
+ else
+ write_both(command_name, *args, &block)
+ end
+ end
+
+ def read_one_with_fallback(command_name, *args, &block)
+ begin
+ value = send_command(primary_store, command_name, *args, &block)
+ rescue StandardError => e
+ log_error(e, command_name,
+ multi_store_error_message: FAILED_TO_READ_ERROR_MESSAGE)
+ end
+
+ value ||= fallback_read(command_name, *args, &block)
+
+ value
+ end
+
+ def fallback_read(command_name, *args, &block)
+ value = send_command(secondary_store, command_name, *args, &block)
+
+ if value
+ log_error(ReadFromPrimaryError.new, command_name)
+ increment_read_fallback_count(command_name)
+ end
+
+ value
+ end
+
+ def write_both(command_name, *args, &block)
+ begin
+ send_command(primary_store, command_name, *args, &block)
+ rescue StandardError => e
+ log_error(e, command_name,
+ multi_store_error_message: FAILED_TO_WRITE_ERROR_MESSAGE)
+ end
+
+ send_command(secondary_store, command_name, *args, &block)
+ end
+
+ def multi_store_enabled?
+ Feature.enabled?(:use_multi_store, default_enabled: :yaml) && !same_redis_store?
+ end
+
+ def same_redis_store?
+ strong_memoize(:same_redis_store) do
+          # <Redis client v4.4.0 for redis:///path_to/redis/redis.socket/5>
+ primary_store.inspect == secondary_store.inspect
+ end
+ end
+
+ # rubocop:disable GitlabSecurity/PublicSend
+ def send_command(redis_instance, command_name, *args, &block)
+ if block_given?
+ # Make sure that block is wrapped and executed only on the redis instance that is executing the block
+ redis_instance.send(command_name, *args) do |*params|
+ with_instance(redis_instance, *params, &block)
+ end
+ else
+ redis_instance.send(command_name, *args)
+ end
+ end
+ # rubocop:enable GitlabSecurity/PublicSend
+
+ def with_instance(instance, *params)
+ @instance = instance
+
+ yield(*params)
+ ensure
+ @instance = nil
+ end
+
+ def increment_read_fallback_count(command_name)
+ @read_fallback_counter ||= Gitlab::Metrics.counter(:gitlab_redis_multi_store_read_fallback_total, 'Client side Redis MultiStore reading fallback')
+ @read_fallback_counter.increment(command: command_name, instance_name: instance_name)
+ end
+
+ def increment_method_missing_count(command_name)
+ @method_missing_counter ||= Gitlab::Metrics.counter(:gitlab_redis_multi_store_method_missing_total, 'Client side Redis MultiStore method missing')
+        @method_missing_counter.increment(command: command_name, instance_name: instance_name)
+ end
+
+ def validate_stores!
+ raise ArgumentError, 'primary_store is required' unless primary_store
+ raise ArgumentError, 'secondary_store is required' unless secondary_store
+ raise ArgumentError, 'invalid primary_store' unless primary_store.is_a?(::Redis)
+ raise ArgumentError, 'invalid secondary_store' unless secondary_store.is_a?(::Redis)
+ end
+
+ def log_error(exception, command_name, extra = {})
+ Gitlab::ErrorTracking.log_exception(
+ exception,
+ command_name: command_name,
+ extra: extra.merge(instance_name: instance_name))
+ end
+ end
+ end
+end
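
A brief usage sketch of the class above, assuming two already-configured Redis connections (the instance name and URLs are illustrative):

    primary   = ::Redis.new(url: 'redis://new-cache:6379')  # illustrative URL
    secondary = ::Redis.new(url: 'redis://old-cache:6379')  # illustrative URL
    store = Gitlab::Redis::MultiStore.new(primary, secondary, 'cache')

    # With the :use_multi_store feature flag enabled, WRITE_COMMANDS (set, del, ...)
    # go to both stores, while READ_COMMANDS (get, mget, ...) try the primary first
    # and fall back to the secondary, bumping the fallback counter on a miss.
    store.set('session:1', 'value')
    store.get('session:1')
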
diff --git a/lib/gitlab/redis/wrapper.rb b/lib/gitlab/redis/wrapper.rb
index 7b804038146..985c8dc619c 100644
--- a/lib/gitlab/redis/wrapper.rb
+++ b/lib/gitlab/redis/wrapper.rb
@@ -17,7 +17,7 @@ module Gitlab
module Redis
class Wrapper
class << self
- delegate :params, :url, to: :new
+ delegate :params, :url, :store, to: :new
def with
pool.with { |redis| yield redis }
@@ -126,6 +126,10 @@ module Gitlab
sentinels && !sentinels.empty?
end
+ def store(extras = {})
+ ::Redis::Store::Factory.create(redis_store_options.merge(extras))
+ end
+
private
def redis_store_options
diff --git a/lib/gitlab/runtime.rb b/lib/gitlab/runtime.rb
index f60cac0aff0..5fbbfd90be1 100644
--- a/lib/gitlab/runtime.rb
+++ b/lib/gitlab/runtime.rb
@@ -63,12 +63,8 @@ module Gitlab
puma?
end
- def action_cable?
- web_server? && (!!defined?(ACTION_CABLE_SERVER) || Gitlab::ActionCable::Config.in_app?)
- end
-
def multi_threaded?
- puma? || sidekiq? || action_cable?
+ puma? || sidekiq?
end
def puma_in_clustered_mode?
@@ -84,12 +80,15 @@ module Gitlab
if puma? && Puma.respond_to?(:cli_config)
threads += Puma.cli_config.options[:max_threads]
elsif sidekiq?
- # An extra thread for the poller in Sidekiq Cron:
+ # 2 extra threads for the pollers in Sidekiq and Sidekiq Cron:
# https://github.com/ondrejbartas/sidekiq-cron#under-the-hood
- threads += Sidekiq.options[:concurrency] + 1
+ #
+ # These threads execute Sidekiq client middleware when jobs
+ # are enqueued and those can access DB / Redis.
+ threads += Sidekiq.options[:concurrency] + 2
end
- if action_cable?
+ if web_server?
threads += Gitlab::ActionCable::Config.worker_pool_size
end
diff --git a/lib/gitlab/saas.rb b/lib/gitlab/saas.rb
index 9220ad1be6c..1e00bd4cbfc 100644
--- a/lib/gitlab/saas.rb
+++ b/lib/gitlab/saas.rb
@@ -38,11 +38,11 @@ module Gitlab
end
def self.about_pricing_url
- "https://about.gitlab.com/pricing"
+ "https://about.gitlab.com/pricing/"
end
def self.about_pricing_faq_url
- "https://about.gitlab.com/gitlab-com/#faq"
+ "https://about.gitlab.com/pricing#faq"
end
end
end
diff --git a/lib/gitlab/search_results.rb b/lib/gitlab/search_results.rb
index 217a48e740d..37414f9e2b1 100644
--- a/lib/gitlab/search_results.rb
+++ b/lib/gitlab/search_results.rb
@@ -115,6 +115,11 @@ module Gitlab
{}
end
+ # aggregations are only performed by Elasticsearch backed results
+ def aggregations(scope)
+ []
+ end
+
private
def collection_for(scope)
diff --git a/lib/gitlab/setup_helper.rb b/lib/gitlab/setup_helper.rb
index 751405f1045..3a31f651714 100644
--- a/lib/gitlab/setup_helper.rb
+++ b/lib/gitlab/setup_helper.rb
@@ -104,9 +104,6 @@ module Gitlab
socket_filename = options[:gitaly_socket] || "gitaly.socket"
prometheus_listen_addr = options[:prometheus_listen_addr]
- git_bin_path = File.expand_path('../gitaly/_build/deps/git/install/bin/git')
- git_bin_path = nil unless File.exist?(git_bin_path)
-
config = {
# Override the set gitaly_address since Praefect is in the loop
socket_path: File.join(gitaly_dir, socket_filename),
@@ -116,8 +113,8 @@ module Gitlab
# sidekiq jobs, and concurrency will be low anyway in test.
git: {
catfile_cache_size: 5,
- bin_path: git_bin_path
- }.compact,
+ bin_path: File.expand_path(File.join(gitaly_dir, '_build', 'deps', 'git', 'install', 'bin', 'git'))
+ },
prometheus_listen_addr: prometheus_listen_addr
}.compact
diff --git a/lib/gitlab/sidekiq_cluster.rb b/lib/gitlab/sidekiq_cluster.rb
deleted file mode 100644
index cc1bd282da8..00000000000
--- a/lib/gitlab/sidekiq_cluster.rb
+++ /dev/null
@@ -1,171 +0,0 @@
-# frozen_string_literal: true
-
-require 'shellwords'
-
-module Gitlab
- module SidekiqCluster
- # The signals that should terminate both the master and workers.
- TERMINATE_SIGNALS = %i(INT TERM).freeze
-
- # The signals that should simply be forwarded to the workers.
- FORWARD_SIGNALS = %i(TTIN USR1 USR2 HUP).freeze
-
- # Traps the given signals and yields the block whenever these signals are
- # received.
- #
- # The block is passed the name of the signal.
- #
- # Example:
- #
- # trap_signals(%i(HUP TERM)) do |signal|
- # ...
- # end
- def self.trap_signals(signals)
- signals.each do |signal|
- trap(signal) do
- yield signal
- end
- end
- end
-
- def self.trap_terminate(&block)
- trap_signals(TERMINATE_SIGNALS, &block)
- end
-
- def self.trap_forward(&block)
- trap_signals(FORWARD_SIGNALS, &block)
- end
-
- def self.signal(pid, signal)
- Process.kill(signal, pid)
- true
- rescue Errno::ESRCH
- false
- end
-
- def self.signal_processes(pids, signal)
- pids.each { |pid| signal(pid, signal) }
- end
-
- # Starts Sidekiq workers for the pairs of processes.
- #
- # Example:
- #
- # start([ ['foo'], ['bar', 'baz'] ], :production)
- #
- # This would start two Sidekiq processes: one processing "foo", and one
- # processing "bar" and "baz". Each one is placed in its own process group.
- #
- # queues - An Array containing Arrays. Each sub Array should specify the
- # queues to use for a single process.
- #
- # directory - The directory of the Rails application.
- #
- # Returns an Array containing the PIDs of the started processes.
- def self.start(queues, env: :development, directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, timeout: CLI::DEFAULT_SOFT_TIMEOUT_SECONDS, dryrun: false)
- queues.map.with_index do |pair, index|
- start_sidekiq(pair, env: env,
- directory: directory,
- max_concurrency: max_concurrency,
- min_concurrency: min_concurrency,
- worker_id: index,
- timeout: timeout,
- dryrun: dryrun)
- end
- end
-
- # Starts a Sidekiq process that processes _only_ the given queues.
- #
- # Returns the PID of the started process.
- def self.start_sidekiq(queues, env:, directory:, max_concurrency:, min_concurrency:, worker_id:, timeout:, dryrun:)
- counts = count_by_queue(queues)
-
- cmd = %w[bundle exec sidekiq]
- cmd << "-c#{self.concurrency(queues, min_concurrency, max_concurrency)}"
- cmd << "-e#{env}"
- cmd << "-t#{timeout}"
- cmd << "-gqueues:#{proc_details(counts)}"
- cmd << "-r#{directory}"
-
- counts.each do |queue, count|
- cmd << "-q#{queue},#{count}"
- end
-
- if dryrun
- puts Shellwords.join(cmd) # rubocop:disable Rails/Output
- return
- end
-
- pid = Process.spawn(
- { 'ENABLE_SIDEKIQ_CLUSTER' => '1',
- 'SIDEKIQ_WORKER_ID' => worker_id.to_s },
- *cmd,
- pgroup: true,
- err: $stderr,
- out: $stdout
- )
-
- wait_async(pid)
-
- pid
- end
-
- def self.count_by_queue(queues)
- queues.tally
- end
-
- def self.proc_details(counts)
- counts.map do |queue, count|
- if count == 1
- queue
- else
- "#{queue} (#{count})"
- end
- end.join(',')
- end
-
- def self.concurrency(queues, min_concurrency, max_concurrency)
- concurrency_from_queues = queues.length + 1
- max = max_concurrency > 0 ? max_concurrency : concurrency_from_queues
- min = [min_concurrency, max].min
-
- concurrency_from_queues.clamp(min, max)
- end
-
- # Waits for the given process to complete using a separate thread.
- def self.wait_async(pid)
- Thread.new do
- Process.wait(pid) rescue Errno::ECHILD
- end
- end
-
- # Returns true if all the processes are alive.
- def self.all_alive?(pids)
- pids.each do |pid|
- return false unless process_alive?(pid)
- end
-
- true
- end
-
- def self.any_alive?(pids)
- pids_alive(pids).any?
- end
-
- def self.pids_alive(pids)
- pids.select { |pid| process_alive?(pid) }
- end
-
- def self.process_alive?(pid)
- # Signal 0 tests whether the process exists and we have access to send signals
- # but is otherwise a noop (doesn't actually send a signal to the process)
- signal(pid, 0)
- end
-
- def self.write_pid(path)
- File.open(path, 'w') do |handle|
- handle.write(Process.pid.to_s)
- end
- end
- end
-end
diff --git a/lib/gitlab/sidekiq_cluster/cli.rb b/lib/gitlab/sidekiq_cluster/cli.rb
deleted file mode 100644
index 3dee257229d..00000000000
--- a/lib/gitlab/sidekiq_cluster/cli.rb
+++ /dev/null
@@ -1,230 +0,0 @@
-# frozen_string_literal: true
-
-require 'optparse'
-require 'logger'
-require 'time'
-
-module Gitlab
- module SidekiqCluster
- class CLI
- CHECK_TERMINATE_INTERVAL_SECONDS = 1
-
- # How long to wait when asking for a clean termination.
- # It maps the Sidekiq default timeout:
- # https://github.com/mperham/sidekiq/wiki/Signals#term
- #
- # This value is passed to Sidekiq's `-t` if none
- # is given through arguments.
- DEFAULT_SOFT_TIMEOUT_SECONDS = 25
-
- # After surpassing the soft timeout.
- DEFAULT_HARD_TIMEOUT_SECONDS = 5
-
- CommandError = Class.new(StandardError)
-
- def initialize(log_output = $stderr)
- require_relative '../../../lib/gitlab/sidekiq_logging/json_formatter'
-
- # As recommended by https://github.com/mperham/sidekiq/wiki/Advanced-Options#concurrency
- @max_concurrency = 50
- @min_concurrency = 0
- @environment = ENV['RAILS_ENV'] || 'development'
- @pid = nil
- @interval = 5
- @alive = true
- @processes = []
- @logger = Logger.new(log_output)
- @logger.formatter = ::Gitlab::SidekiqLogging::JSONFormatter.new
- @rails_path = Dir.pwd
- @dryrun = false
- @list_queues = false
- end
-
- def run(argv = ARGV)
- if argv.empty?
- raise CommandError,
- 'You must specify at least one queue to start a worker for'
- end
-
- option_parser.parse!(argv)
-
- if @dryrun && @list_queues
- raise CommandError,
- 'The --dryrun and --list-queues options are mutually exclusive'
- end
-
- worker_metadatas = SidekiqConfig::CliMethods.worker_metadatas(@rails_path)
- worker_queues = SidekiqConfig::CliMethods.worker_queues(@rails_path)
-
- queue_groups = argv.map do |queues_or_query_string|
- if queues_or_query_string =~ /[\r\n]/
- raise CommandError,
- 'The queue arguments cannot contain newlines'
- end
-
- next worker_queues if queues_or_query_string == SidekiqConfig::WorkerMatcher::WILDCARD_MATCH
-
- # When using the queue query syntax, we treat each queue group
- # as a worker attribute query, and resolve the queues for the
- # queue group using this query.
-
- if @queue_selector
- SidekiqConfig::CliMethods.query_queues(queues_or_query_string, worker_metadatas)
- else
- SidekiqConfig::CliMethods.expand_queues(queues_or_query_string.split(','), worker_queues)
- end
- end
-
- if @negate_queues
- queue_groups.map! { |queues| worker_queues - queues }
- end
-
- if queue_groups.all?(&:empty?)
- raise CommandError,
- 'No queues found, you must select at least one queue'
- end
-
- if @list_queues
- puts queue_groups.map(&:sort) # rubocop:disable Rails/Output
-
- return
- end
-
- unless @dryrun
- @logger.info("Starting cluster with #{queue_groups.length} processes")
- end
-
- @processes = SidekiqCluster.start(
- queue_groups,
- env: @environment,
- directory: @rails_path,
- max_concurrency: @max_concurrency,
- min_concurrency: @min_concurrency,
- dryrun: @dryrun,
- timeout: soft_timeout_seconds
- )
-
- return if @dryrun
-
- write_pid
- trap_signals
- start_loop
- end
-
- def write_pid
- SidekiqCluster.write_pid(@pid) if @pid
- end
-
- def soft_timeout_seconds
- @soft_timeout_seconds || DEFAULT_SOFT_TIMEOUT_SECONDS
- end
-
- # The amount of time it'll wait for killing the alive Sidekiq processes.
- def hard_timeout_seconds
- soft_timeout_seconds + DEFAULT_HARD_TIMEOUT_SECONDS
- end
-
- def monotonic_time
- Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
- end
-
- def continue_waiting?(deadline)
- SidekiqCluster.any_alive?(@processes) && monotonic_time < deadline
- end
-
- def hard_stop_stuck_pids
- SidekiqCluster.signal_processes(SidekiqCluster.pids_alive(@processes), "-KILL")
- end
-
- def wait_for_termination
- deadline = monotonic_time + hard_timeout_seconds
- sleep(CHECK_TERMINATE_INTERVAL_SECONDS) while continue_waiting?(deadline)
-
- hard_stop_stuck_pids
- end
-
- def trap_signals
- SidekiqCluster.trap_terminate do |signal|
- @alive = false
- SidekiqCluster.signal_processes(@processes, signal)
- wait_for_termination
- end
-
- SidekiqCluster.trap_forward do |signal|
- SidekiqCluster.signal_processes(@processes, signal)
- end
- end
-
- def start_loop
- while @alive
- sleep(@interval)
-
- unless SidekiqCluster.all_alive?(@processes)
- # If a child process died we'll just terminate the whole cluster. It's up to
- # runit and such to then restart the cluster.
- @logger.info('A worker terminated, shutting down the cluster')
-
- SidekiqCluster.signal_processes(@processes, :TERM)
- break
- end
- end
- end
-
- def option_parser
- OptionParser.new do |opt|
- opt.banner = "#{File.basename(__FILE__)} [QUEUE,QUEUE] [QUEUE] ... [OPTIONS]"
-
- opt.separator "\nOptions:\n"
-
- opt.on('-h', '--help', 'Shows this help message') do
- abort opt.to_s
- end
-
- opt.on('-m', '--max-concurrency INT', 'Maximum threads to use with Sidekiq (default: 50, 0 to disable)') do |int|
- @max_concurrency = int.to_i
- end
-
- opt.on('--min-concurrency INT', 'Minimum threads to use with Sidekiq (default: 0)') do |int|
- @min_concurrency = int.to_i
- end
-
- opt.on('-e', '--environment ENV', 'The application environment') do |env|
- @environment = env
- end
-
- opt.on('-P', '--pidfile PATH', 'Path to the PID file') do |pid|
- @pid = pid
- end
-
- opt.on('-r', '--require PATH', 'Location of the Rails application') do |path|
- @rails_path = path
- end
-
- opt.on('--queue-selector', 'Run workers based on the provided selector') do |queue_selector|
- @queue_selector = queue_selector
- end
-
- opt.on('-n', '--negate', 'Run workers for all queues in sidekiq_queues.yml except the given ones') do
- @negate_queues = true
- end
-
- opt.on('-i', '--interval INT', 'The number of seconds to wait between worker checks') do |int|
- @interval = int.to_i
- end
-
- opt.on('-t', '--timeout INT', 'Graceful timeout for all running processes') do |timeout|
- @soft_timeout_seconds = timeout.to_i
- end
-
- opt.on('-d', '--dryrun', 'Print commands that would be run without this flag, and quit') do |int|
- @dryrun = true
- end
-
- opt.on('--list-queues', 'List matching queues, and quit') do |int|
- @list_queues = true
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/sidekiq_config.rb b/lib/gitlab/sidekiq_config.rb
index 5663c51bb7a..07ddac209f8 100644
--- a/lib/gitlab/sidekiq_config.rb
+++ b/lib/gitlab/sidekiq_config.rb
@@ -6,11 +6,13 @@ module Gitlab
module SidekiqConfig
FOSS_QUEUE_CONFIG_PATH = 'app/workers/all_queues.yml'
EE_QUEUE_CONFIG_PATH = 'ee/app/workers/all_queues.yml'
+ JH_QUEUE_CONFIG_PATH = 'jh/app/workers/all_queues.yml'
SIDEKIQ_QUEUES_PATH = 'config/sidekiq_queues.yml'
QUEUE_CONFIG_PATHS = [
FOSS_QUEUE_CONFIG_PATH,
- (EE_QUEUE_CONFIG_PATH if Gitlab.ee?)
+ (EE_QUEUE_CONFIG_PATH if Gitlab.ee?),
+ (JH_QUEUE_CONFIG_PATH if Gitlab.jh?)
].compact.freeze
# This maps workers not in our application code to queues. We need
@@ -33,7 +35,7 @@ module Gitlab
weight: 2,
tags: []
)
- }.transform_values { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: false) }.freeze
+ }.transform_values { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: false, jh: false) }.freeze
class << self
include Gitlab::SidekiqConfig::CliMethods
@@ -58,10 +60,14 @@ module Gitlab
@workers ||= begin
result = []
result.concat(DEFAULT_WORKERS.values)
- result.concat(find_workers(Rails.root.join('app', 'workers'), ee: false))
+ result.concat(find_workers(Rails.root.join('app', 'workers'), ee: false, jh: false))
if Gitlab.ee?
- result.concat(find_workers(Rails.root.join('ee', 'app', 'workers'), ee: true))
+ result.concat(find_workers(Rails.root.join('ee', 'app', 'workers'), ee: true, jh: false))
+ end
+
+ if Gitlab.jh?
+ result.concat(find_workers(Rails.root.join('jh', 'app', 'workers'), ee: false, jh: true))
end
result
@@ -69,16 +75,26 @@ module Gitlab
end
def workers_for_all_queues_yml
- workers.partition(&:ee?).reverse.map(&:sort)
+ workers.each_with_object([[], [], []]) do |worker, array|
+ if worker.jh?
+ array[2].push(worker)
+ elsif worker.ee?
+ array[1].push(worker)
+ else
+ array[0].push(worker)
+ end
+ end.map(&:sort)
end
# YAML.load_file is OK here as we control the file contents
def all_queues_yml_outdated?
- foss_workers, ee_workers = workers_for_all_queues_yml
+ foss_workers, ee_workers, jh_workers = workers_for_all_queues_yml
return true if foss_workers != YAML.load_file(FOSS_QUEUE_CONFIG_PATH)
- Gitlab.ee? && ee_workers != YAML.load_file(EE_QUEUE_CONFIG_PATH)
+ return true if Gitlab.ee? && ee_workers != YAML.load_file(EE_QUEUE_CONFIG_PATH)
+
+ Gitlab.jh? && File.exist?(JH_QUEUE_CONFIG_PATH) && jh_workers != YAML.load_file(JH_QUEUE_CONFIG_PATH)
end
def queues_for_sidekiq_queues_yml
@@ -120,14 +136,14 @@ module Gitlab
private
- def find_workers(root, ee:)
+ def find_workers(root, ee:, jh:)
concerns = root.join('concerns').to_s
Dir[root.join('**', '*.rb')]
.reject { |path| path.start_with?(concerns) }
.map { |path| worker_from_path(path, root) }
.select { |worker| worker < Sidekiq::Worker }
- .map { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: ee) }
+ .map { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: ee, jh: jh) }
end
def worker_from_path(path, root)
diff --git a/lib/gitlab/sidekiq_config/cli_methods.rb b/lib/gitlab/sidekiq_config/cli_methods.rb
index 8eef15f9ccb..70798f8c3e8 100644
--- a/lib/gitlab/sidekiq_config/cli_methods.rb
+++ b/lib/gitlab/sidekiq_config/cli_methods.rb
@@ -18,6 +18,7 @@ module Gitlab
QUEUE_CONFIG_PATHS = begin
result = %w[app/workers/all_queues.yml]
result << 'ee/app/workers/all_queues.yml' if Gitlab.ee?
+ result << 'jh/app/workers/all_queues.yml' if Gitlab.jh?
result
end.freeze
diff --git a/lib/gitlab/sidekiq_config/worker.rb b/lib/gitlab/sidekiq_config/worker.rb
index a343573440f..1e3fb675ca7 100644
--- a/lib/gitlab/sidekiq_config/worker.rb
+++ b/lib/gitlab/sidekiq_config/worker.rb
@@ -13,15 +13,20 @@ module Gitlab
:worker_has_external_dependencies?,
to: :klass
- def initialize(klass, ee:)
+ def initialize(klass, ee:, jh: false)
@klass = klass
@ee = ee
+ @jh = jh
end
def ee?
@ee
end
+ def jh?
+ @jh
+ end
+
def ==(other)
to_yaml == case other
when self.class
diff --git a/lib/gitlab/sidekiq_enq.rb b/lib/gitlab/sidekiq_enq.rb
index d8a01ac8ef4..de0c00fe561 100644
--- a/lib/gitlab/sidekiq_enq.rb
+++ b/lib/gitlab/sidekiq_enq.rb
@@ -1,16 +1,44 @@
# frozen_string_literal: true
-# This is a copy of https://github.com/mperham/sidekiq/blob/32c55e31659a1e6bd42f98334cca5eef2863de8d/lib/sidekiq/scheduled.rb#L11-L34
-#
-# It effectively reverts
-# https://github.com/mperham/sidekiq/commit/9b75467b33759888753191413eddbc15c37a219e
-# because we observe that the extra ZREMs caused by this change can lead to high
-# CPU usage on Redis at peak times:
-# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1179
-#
module Gitlab
class SidekiqEnq
+ LUA_ZPOPBYSCORE = <<~EOS
+ local key, now = KEYS[1], ARGV[1]
+ local jobs = redis.call("zrangebyscore", key, "-inf", now, "limit", 0, 1)
+ if jobs[1] then
+ redis.call("zrem", key, jobs[1])
+ return jobs[1]
+ end
+ EOS
+
+ LUA_ZPOPBYSCORE_SHA = Digest::SHA1.hexdigest(LUA_ZPOPBYSCORE)
+
def enqueue_jobs(now = Time.now.to_f.to_s, sorted_sets = Sidekiq::Scheduled::SETS)
+ Rails.application.reloader.wrap do
+ ::Gitlab::WithRequestStore.with_request_store do
+ if Feature.enabled?(:atomic_sidekiq_scheduler, default_enabled: :yaml)
+ atomic_find_jobs_and_enqueue(now, sorted_sets)
+ else
+ find_jobs_and_enqueue(now, sorted_sets)
+ end
+
+ ensure
+ ::Gitlab::Database::LoadBalancing.release_hosts
+ end
+ end
+ end
+
+ private
+
+ # This is a copy of https://github.com/mperham/sidekiq/blob/32c55e31659a1e6bd42f98334cca5eef2863de8d/lib/sidekiq/scheduled.rb#L11-L34
+ #
+ # It effectively reverts
+ # https://github.com/mperham/sidekiq/commit/9b75467b33759888753191413eddbc15c37a219e
+ # because we observe that the extra ZREMs caused by this change can lead to high
+ # CPU usage on Redis at peak times:
+ # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1179
+ #
+ def find_jobs_and_enqueue(now, sorted_sets)
# A job's "score" in Redis is the time at which it should be processed.
# Just check Redis for the set of jobs with a timestamp before now.
Sidekiq.redis do |conn|
@@ -24,8 +52,7 @@ module Gitlab
# We need to go through the list one at a time to reduce the risk of something
# going wrong between the time jobs are popped from the scheduled queue and when
# they are pushed onto a work queue and losing the jobs.
- while (job = conn.zrangebyscore(sorted_set, "-inf", now, limit: [0, 1]).first)
-
+ while job = conn.zrangebyscore(sorted_set, "-inf", now, limit: [0, 1]).first
# Pop item off the queue and add it to the work queue. If the job can't be popped from
# the queue, it's because another process already popped it so we can move on to the
# next one.
@@ -47,5 +74,38 @@ module Gitlab
end
end
end
+
+ def atomic_find_jobs_and_enqueue(now, sorted_sets)
+ Sidekiq.redis do |conn|
+ sorted_sets.each do |sorted_set|
+ start_time = ::Gitlab::Metrics::System.monotonic_time
+ jobs = 0
+
+ Sidekiq.logger.info(message: 'Atomically enqueuing scheduled jobs', status: 'start', sorted_set: sorted_set)
+
+ while job = redis_eval_lua(conn, LUA_ZPOPBYSCORE, LUA_ZPOPBYSCORE_SHA, keys: [sorted_set], argv: [now])
+ jobs += 1
+ Sidekiq::Client.push(Sidekiq.load_json(job))
+ end
+
+ end_time = ::Gitlab::Metrics::System.monotonic_time
+ Sidekiq.logger.info(message: 'Atomically enqueuing scheduled jobs',
+ status: 'done',
+ sorted_set: sorted_set,
+ jobs_count: jobs,
+ duration_s: end_time - start_time)
+ end
+ end
+ end
+
+ def redis_eval_lua(conn, script, sha, keys: nil, argv: nil)
+ conn.evalsha(sha, keys: keys, argv: argv)
+ rescue ::Redis::CommandError => e
+ if e.message.start_with?('NOSCRIPT')
+ conn.eval(script, keys: keys, argv: argv)
+ else
+ raise
+ end
+ end
end
end
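
For reference, the Lua script above is the atomic equivalent of the two-step, two-round-trip sequence used by the non-atomic path (sketch only):

    # Non-atomic equivalent of LUA_ZPOPBYSCORE, shown for comparison.
    job = conn.zrangebyscore(sorted_set, '-inf', now, limit: [0, 1]).first
    conn.zrem(sorted_set, job) if job
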
diff --git a/lib/gitlab/sidekiq_logging/deduplication_logger.rb b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
index c5654819ffb..f4f6cb2a306 100644
--- a/lib/gitlab/sidekiq_logging/deduplication_logger.rb
+++ b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
@@ -6,7 +6,7 @@ module Gitlab
include Singleton
include LogsJobs
- def log(job, deduplication_type, deduplication_options = {})
+ def deduplicated_log(job, deduplication_type, deduplication_options = {})
payload = parse_job(job)
payload['job_status'] = 'deduplicated'
payload['message'] = "#{base_message(payload)}: deduplicated: #{deduplication_type}"
@@ -17,6 +17,14 @@ module Gitlab
Sidekiq.logger.info payload
end
+
+ def rescheduled_log(job)
+ payload = parse_job(job)
+ payload['job_status'] = 'rescheduled'
+ payload['message'] = "#{base_message(payload)}: rescheduled"
+
+ Sidekiq.logger.info payload
+ end
end
end
end
diff --git a/lib/gitlab/sidekiq_logging/json_formatter.rb b/lib/gitlab/sidekiq_logging/json_formatter.rb
index 8894b48417c..a6281bbdf26 100644
--- a/lib/gitlab/sidekiq_logging/json_formatter.rb
+++ b/lib/gitlab/sidekiq_logging/json_formatter.rb
@@ -6,7 +6,7 @@ require 'json'
module Gitlab
module SidekiqLogging
class JSONFormatter
- TIMESTAMP_FIELDS = %w[created_at enqueued_at started_at retried_at failed_at completed_at].freeze
+ TIMESTAMP_FIELDS = %w[created_at scheduled_at enqueued_at started_at retried_at failed_at completed_at].freeze
def call(severity, timestamp, progname, data)
output = {
diff --git a/lib/gitlab/sidekiq_middleware.rb b/lib/gitlab/sidekiq_middleware.rb
index c97b1632bf8..69802fd6217 100644
--- a/lib/gitlab/sidekiq_middleware.rb
+++ b/lib/gitlab/sidekiq_middleware.rb
@@ -33,6 +33,7 @@ module Gitlab
chain.add ::Gitlab::SidekiqMiddleware::BatchLoader
chain.add ::Gitlab::SidekiqMiddleware::InstrumentationLogger
chain.add ::Gitlab::SidekiqMiddleware::AdminMode::Server
+ chain.add ::Gitlab::SidekiqMiddleware::QueryAnalyzer if Gitlab.dev_or_test_env? || Gitlab::Utils.to_boolean(ENV['GITLAB_ENABLE_QUERY_ANALYZERS'], default: false)
chain.add ::Gitlab::SidekiqVersioning::Middleware
chain.add ::Gitlab::SidekiqStatus::ServerMiddleware
chain.add ::Gitlab::SidekiqMiddleware::WorkerContext::Server
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
index e63164efc94..f31262bfcc9 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
@@ -19,11 +19,12 @@ module Gitlab
class DuplicateJob
include Gitlab::Utils::StrongMemoize
- DUPLICATE_KEY_TTL = 6.hours
+ DEFAULT_DUPLICATE_KEY_TTL = 6.hours
WAL_LOCATION_TTL = 60.seconds
MAX_REDIS_RETRIES = 5
DEFAULT_STRATEGY = :until_executing
STRATEGY_NONE = :none
+ DEDUPLICATED_FLAG_VALUE = 1
LUA_SET_WAL_SCRIPT = <<~EOS
local key, wal, offset, ttl = KEYS[1], ARGV[1], tonumber(ARGV[2]), ARGV[3]
@@ -58,7 +59,7 @@ module Gitlab
end
# This method will return the jid that was set in redis
- def check!(expiry = DUPLICATE_KEY_TTL)
+ def check!(expiry = duplicate_key_ttl)
read_jid = nil
read_wal_locations = {}
@@ -83,7 +84,11 @@ module Gitlab
Sidekiq.redis do |redis|
redis.multi do |multi|
job_wal_locations.each do |connection_name, location|
- multi.eval(LUA_SET_WAL_SCRIPT, keys: [wal_location_key(connection_name)], argv: [location, pg_wal_lsn_diff(connection_name).to_i, WAL_LOCATION_TTL])
+ multi.eval(
+ LUA_SET_WAL_SCRIPT,
+ keys: [wal_location_key(connection_name)],
+ argv: [location, pg_wal_lsn_diff(connection_name).to_i, WAL_LOCATION_TTL]
+ )
end
end
end
@@ -110,12 +115,18 @@ module Gitlab
def delete!
Sidekiq.redis do |redis|
redis.multi do |multi|
- multi.del(idempotency_key)
+ multi.del(idempotency_key, deduplicated_flag_key)
delete_wal_locations!(multi)
end
end
end
+ def reschedule
+ Gitlab::SidekiqLogging::DeduplicationLogger.instance.rescheduled_log(job)
+
+ worker_klass.perform_async(*arguments)
+ end
+
def scheduled?
scheduled_at.present?
end
@@ -126,6 +137,22 @@ module Gitlab
jid != existing_jid
end
+ def set_deduplicated_flag!(expiry = duplicate_key_ttl)
+ return unless reschedulable?
+
+ Sidekiq.redis do |redis|
+ redis.set(deduplicated_flag_key, DEDUPLICATED_FLAG_VALUE, ex: expiry, nx: true)
+ end
+ end
+
+ def should_reschedule?
+ return false unless reschedulable?
+
+ Sidekiq.redis do |redis|
+ redis.get(deduplicated_flag_key).present?
+ end
+ end
+
def scheduled_at
job['at']
end
@@ -145,6 +172,10 @@ module Gitlab
worker_klass.idempotent?
end
+ def duplicate_key_ttl
+ options[:ttl] || DEFAULT_DUPLICATE_KEY_TTL
+ end
+
private
attr_writer :existing_wal_locations
@@ -181,7 +212,12 @@ module Gitlab
end
def pg_wal_lsn_diff(connection_name)
- Gitlab::Database.databases[connection_name].pg_wal_lsn_diff(job_wal_locations[connection_name], existing_wal_locations[connection_name])
+ model = Gitlab::Database.database_base_models[connection_name]
+
+ model.connection.load_balancer.wal_diff(
+ job_wal_locations[connection_name],
+ existing_wal_locations[connection_name]
+ )
end
def strategy
@@ -216,6 +252,10 @@ module Gitlab
@idempotency_key ||= job['idempotency_key'] || "#{namespace}:#{idempotency_hash}"
end
+ def deduplicated_flag_key
+ "#{idempotency_key}:deduplicate_flag"
+ end
+
def idempotency_hash
Digest::SHA256.hexdigest(idempotency_string)
end
@@ -235,6 +275,10 @@ module Gitlab
def preserve_wal_location?
Feature.enabled?(:preserve_latest_wal_locations_for_idempotent_jobs, default_enabled: :yaml)
end
+
+ def reschedulable?
+ !scheduled? && options[:if_deduplicated] == :reschedule_once
+ end
end
end
end
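
The new `options[:ttl]` and `options[:if_deduplicated]` keys read above would come from the worker's deduplication options; a hypothetical declaration follows (the worker-side `deduplicate` DSL is defined elsewhere, outside this diff):

    class SomeIdempotentWorker            # hypothetical worker, for illustration
      include ApplicationWorker

      idempotent!
      deduplicate :until_executed, if_deduplicated: :reschedule_once, ttl: 5.minutes
    end
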
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb
index a35edc5774e..6d5d41902ea 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/server.rb
@@ -5,7 +5,7 @@ module Gitlab
module DuplicateJobs
class Server
def call(worker, job, queue, &block)
- DuplicateJob.new(job, queue).perform(&block)
+ ::Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(job, queue).perform(&block)
end
end
end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/base.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/base.rb
index df5df590281..9b3066bae6c 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/base.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/base.rb
@@ -26,8 +26,8 @@ module Gitlab
end
def check!
- # The default expiry time is the DuplicateJob::DUPLICATE_KEY_TTL already
- # Only the strategies de-duplicating when scheduling
+ # The default expiry time is the worker class'
+ # configured deduplication TTL or DuplicateJob::DEFAULT_DUPLICATE_KEY_TTL.
duplicate_job.check!
end
end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb
index b0da85b74a6..0fc95534e2a 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb
@@ -6,6 +6,7 @@ module Gitlab
module Strategies
class DeduplicatesWhenScheduling < Base
extend ::Gitlab::Utils::Override
+ include ::Gitlab::Utils::StrongMemoize
override :initialize
def initialize(duplicate_job)
@@ -19,8 +20,9 @@ module Gitlab
if duplicate_job.idempotent?
duplicate_job.update_latest_wal_location!
+ duplicate_job.set_deduplicated_flag!(expiry)
- Gitlab::SidekiqLogging::DeduplicationLogger.instance.log(
+ Gitlab::SidekiqLogging::DeduplicationLogger.instance.deduplicated_log(
job, "dropped #{strategy_name}", duplicate_job.options)
return false
end
@@ -49,11 +51,16 @@ module Gitlab
end
def expiry
- return DuplicateJob::DUPLICATE_KEY_TTL unless duplicate_job.scheduled?
+ strong_memoize(:expiry) do
+ next duplicate_job.duplicate_key_ttl unless duplicate_job.scheduled?
- time_diff = duplicate_job.scheduled_at.to_i - Time.now.to_i
+ time_diff = [
+ duplicate_job.scheduled_at.to_i - Time.now.to_i,
+ 0
+ ].max
- time_diff > 0 ? time_diff : DuplicateJob::DUPLICATE_KEY_TTL
+ time_diff + duplicate_job.duplicate_key_ttl
+ end
end
end
end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed.rb
index 25f1b8b7c51..8c7e15364f8 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed.rb
@@ -14,7 +14,10 @@ module Gitlab
yield
+ should_reschedule = duplicate_job.should_reschedule?
+ # Deleting before rescheduling to make sure we don't deduplicate again.
duplicate_job.delete!
+ duplicate_job.reschedule if should_reschedule
end
end
end
diff --git a/lib/gitlab/sidekiq_middleware/query_analyzer.rb b/lib/gitlab/sidekiq_middleware/query_analyzer.rb
new file mode 100644
index 00000000000..4478fcd3594
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/query_analyzer.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqMiddleware
+ class QueryAnalyzer
+ def call(worker, job, queue)
+ ::Gitlab::Database::QueryAnalyzer.instance.within { yield }
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/size_limiter/validator.rb b/lib/gitlab/sidekiq_middleware/size_limiter/validator.rb
index 71316bbd243..6186c9ad1f4 100644
--- a/lib/gitlab/sidekiq_middleware/size_limiter/validator.rb
+++ b/lib/gitlab/sidekiq_middleware/size_limiter/validator.rb
@@ -55,18 +55,15 @@ module Gitlab
attr_reader :mode, :size_limit, :compression_threshold
- def initialize(
- worker_class, job,
- mode: Gitlab::CurrentSettings.sidekiq_job_limiter_mode,
- compression_threshold: Gitlab::CurrentSettings.sidekiq_job_limiter_compression_threshold_bytes,
- size_limit: Gitlab::CurrentSettings.sidekiq_job_limiter_limit_bytes
- )
+ def initialize(worker_class, job)
@worker_class = worker_class
@job = job
- set_mode(mode)
- set_compression_threshold(compression_threshold)
- set_size_limit(size_limit)
+ current_settings = Gitlab::CurrentSettings.current_application_settings
+
+ @mode = current_settings.sidekiq_job_limiter_mode
+ @compression_threshold = current_settings.sidekiq_job_limiter_compression_threshold_bytes
+ @size_limit = current_settings.sidekiq_job_limiter_limit_bytes
end
def validate!
@@ -90,30 +87,6 @@ module Gitlab
private
- def set_mode(mode)
- @mode = (mode || TRACK_MODE).to_s.strip
- unless MODES.include?(@mode)
- ::Sidekiq.logger.warn "Invalid Sidekiq size limiter mode: #{@mode}. Fallback to #{TRACK_MODE} mode."
- @mode = TRACK_MODE
- end
- end
-
- def set_compression_threshold(compression_threshold)
- @compression_threshold = (compression_threshold || DEFAULT_COMPRESSION_THRESHOLD_BYTES).to_i
- if @compression_threshold <= 0
- ::Sidekiq.logger.warn "Invalid Sidekiq size limiter compression threshold: #{@compression_threshold}"
- @compression_threshold = DEFAULT_COMPRESSION_THRESHOLD_BYTES
- end
- end
-
- def set_size_limit(size_limit)
- @size_limit = (size_limit || DEFAULT_SIZE_LIMIT).to_i
- if @size_limit < 0
- ::Sidekiq.logger.warn "Invalid Sidekiq size limiter limit: #{@size_limit}"
- @size_limit = DEFAULT_SIZE_LIMIT
- end
- end
-
def exceed_limit_error(job_args)
ExceedLimitError.new(@worker_class, job_args.bytesize, @size_limit).tap do |exception|
# This should belong to Gitlab::ErrorTracking. We'll remove this
diff --git a/lib/gitlab/sidekiq_status.rb b/lib/gitlab/sidekiq_status.rb
index 623fdd89456..fbf2718d718 100644
--- a/lib/gitlab/sidekiq_status.rb
+++ b/lib/gitlab/sidekiq_status.rb
@@ -7,12 +7,16 @@ module Gitlab
# To check if a job has been completed, simply pass the job ID to the
# `completed?` method:
#
- # job_id = SomeWorker.perform_async(...)
+ # job_id = SomeWorker.with_status.perform_async(...)
#
# if Gitlab::SidekiqStatus.completed?(job_id)
# ...
# end
#
+ # If you do not use `with_status`, and the worker class does not declare
+ # `status_expiration` in its `sidekiq_options`, then this status will not be
+ # stored.
+ #
# For each job ID registered a separate key is stored in Redis, making lookups
# much faster than using Sidekiq's built-in job finding/status API. These keys
# expire after a certain period of time to prevent storing too many keys in
diff --git a/lib/gitlab/slash_commands/result.rb b/lib/gitlab/slash_commands/result.rb
index a66a2e0726b..d488606120f 100644
--- a/lib/gitlab/slash_commands/result.rb
+++ b/lib/gitlab/slash_commands/result.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -5,3 +6,5 @@ module Gitlab
Result = Struct.new(:type, :message)
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/spamcheck/client.rb b/lib/gitlab/spamcheck/client.rb
index df6d3eb7d0a..925ca44dfc9 100644
--- a/lib/gitlab/spamcheck/client.rb
+++ b/lib/gitlab/spamcheck/client.rb
@@ -5,6 +5,7 @@ module Gitlab
module Spamcheck
class Client
include ::Spam::SpamConstants
+
DEFAULT_TIMEOUT_SECS = 2
VERDICT_MAPPING = {
@@ -27,12 +28,7 @@ module Gitlab
# connect with Spamcheck
@endpoint_url = @endpoint_url.gsub(%r(^grpc:\/\/), '')
- @creds =
- if Rails.env.development? || Rails.env.test?
- :this_channel_is_insecure
- else
- GRPC::Core::ChannelCredentials.new
- end
+ @creds = stub_creds
end
def issue_spam?(spam_issue:, user:, context: {})
@@ -73,6 +69,8 @@ module Gitlab
user_pb.emails << build_email(user.email, user.confirmed?)
user.emails.each do |email|
+ next if email.user_primary_email?
+
user_pb.emails << build_email(email.email, email.confirmed?)
end
@@ -98,6 +96,14 @@ module Gitlab
nanos: ar_timestamp.to_time.nsec)
end
+ def stub_creds
+ if Rails.env.development? || Rails.env.test?
+ :this_channel_is_insecure
+ else
+ GRPC::Core::ChannelCredentials.new ::Gitlab::X509::Certificate.ca_certs_bundle
+ end
+ end
+
def grpc_client
@grpc_client ||= ::Spamcheck::SpamcheckService::Stub.new(@endpoint_url, @creds,
interceptors: interceptors,
diff --git a/lib/gitlab/subscription_portal.rb b/lib/gitlab/subscription_portal.rb
index 9b6bae12057..4f6d25097e2 100644
--- a/lib/gitlab/subscription_portal.rb
+++ b/lib/gitlab/subscription_portal.rb
@@ -4,11 +4,7 @@ module Gitlab
module SubscriptionPortal
def self.default_subscriptions_url
if ::Gitlab.dev_or_test_env?
- if Feature.enabled?(:new_customersdot_staging_url, default_enabled: :yaml)
- 'https://customers.staging.gitlab.com'
- else
- 'https://customers.stg.gitlab.com'
- end
+ 'https://customers.staging.gitlab.com'
else
'https://customers.gitlab.com'
end
@@ -43,7 +39,7 @@ module Gitlab
end
def self.subscriptions_plans_url
- "#{self.subscriptions_url}/plans"
+ Gitlab::Saas.about_pricing_url
end
def self.subscriptions_gitlab_plans_url
diff --git a/lib/gitlab/template_parser/ast.rb b/lib/gitlab/template_parser/ast.rb
index 89318ee0d68..c6a5f66c377 100644
--- a/lib/gitlab/template_parser/ast.rb
+++ b/lib/gitlab/template_parser/ast.rb
@@ -1,3 +1,4 @@
+# rubocop:disable Naming/FileName
# frozen_string_literal: true
module Gitlab
@@ -155,3 +156,5 @@ module Gitlab
end
end
end
+
+# rubocop:enable Naming/FileName
diff --git a/lib/gitlab/testing/request_inspector_middleware.rb b/lib/gitlab/testing/request_inspector_middleware.rb
index 36cdfebcc28..3cbe97cd84c 100644
--- a/lib/gitlab/testing/request_inspector_middleware.rb
+++ b/lib/gitlab/testing/request_inspector_middleware.rb
@@ -9,6 +9,8 @@ module Gitlab
@@logged_requests = Concurrent::Array.new
@@inject_headers = Concurrent::Hash.new
+ Request = Struct.new(:url, :status_code, :request_headers, :response_headers, :body, keyword_init: true)
+
# Resets the current request log and starts logging requests
def self.log_requests!(headers = {})
@@inject_headers.replace(headers)
@@ -40,7 +42,7 @@ module Gitlab
full_body = +''
body.each { |b| full_body << b }
- request = OpenStruct.new(
+ request = Request.new(
url: url,
status_code: status,
request_headers: request_headers,
diff --git a/lib/gitlab/tracking.rb b/lib/gitlab/tracking.rb
index f4fbea50515..ec032cf2d3c 100644
--- a/lib/gitlab/tracking.rb
+++ b/lib/gitlab/tracking.rb
@@ -6,38 +6,37 @@ module Gitlab
class << self
def enabled?
- Gitlab::CurrentSettings.snowplow_enabled?
+ snowplow_micro_enabled? || Gitlab::CurrentSettings.snowplow_enabled?
end
def event(category, action, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists
contexts = [Tracking::StandardContext.new(project: project, user: user, namespace: namespace, **extra).to_context, *context]
snowplow.event(category, action, label: label, property: property, value: value, context: contexts)
- product_analytics.event(category, action, label: label, property: property, value: value, context: contexts)
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error, snowplow_category: category, snowplow_action: action)
end
def options(group)
- additional_features = Feature.enabled?(:additional_snowplow_tracking, group)
- {
- namespace: SNOWPLOW_NAMESPACE,
- hostname: Gitlab::CurrentSettings.snowplow_collector_hostname,
- cookie_domain: Gitlab::CurrentSettings.snowplow_cookie_domain,
- app_id: Gitlab::CurrentSettings.snowplow_app_id,
- form_tracking: additional_features,
- link_click_tracking: additional_features
- }.transform_keys! { |key| key.to_s.camelize(:lower).to_sym }
+ snowplow.options(group)
+ end
+
+ def collector_hostname
+ snowplow.hostname
end
private
def snowplow
- @snowplow ||= Gitlab::Tracking::Destinations::Snowplow.new
+ @snowplow ||= if snowplow_micro_enabled?
+ Gitlab::Tracking::Destinations::SnowplowMicro.new
+ else
+ Gitlab::Tracking::Destinations::Snowplow.new
+ end
end
- def product_analytics
- @product_analytics ||= Gitlab::Tracking::Destinations::ProductAnalytics.new
+ def snowplow_micro_enabled?
+ Rails.env.development? && Gitlab::Utils.to_boolean(ENV['SNOWPLOW_MICRO_ENABLE'])
end
end
end
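With this change, tracking in development also turns on when SNOWPLOW_MICRO_ENABLE parses as true, independent of the instance setting. A hedged sketch of the expected behaviour, assuming Gitlab::Utils.to_boolean treats '1' and 'true' as true:

    ENV['SNOWPLOW_MICRO_ENABLE'] = '1'
    Gitlab::Tracking.enabled?   # => true in development, regardless of snowplow_enabled?
    ENV.delete('SNOWPLOW_MICRO_ENABLE')
    Gitlab::Tracking.enabled?   # => falls back to Gitlab::CurrentSettings.snowplow_enabled?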
diff --git a/lib/gitlab/tracking/destinations/product_analytics.rb b/lib/gitlab/tracking/destinations/product_analytics.rb
deleted file mode 100644
index cacedbc5b83..00000000000
--- a/lib/gitlab/tracking/destinations/product_analytics.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Tracking
- module Destinations
- class ProductAnalytics < Base
- extend ::Gitlab::Utils::Override
- include ::Gitlab::Utils::StrongMemoize
-
- override :event
- def event(category, action, label: nil, property: nil, value: nil, context: nil)
- return unless event_allowed?(category, action)
- return unless enabled?
-
- tracker.track_struct_event(category, action, label, property, value, context, (Time.now.to_f * 1000).to_i)
- end
-
- private
-
- def event_allowed?(category, action)
- category == 'epics' && action == 'promote'
- end
-
- def enabled?
- Feature.enabled?(:product_analytics_tracking, type: :ops) &&
- Gitlab::CurrentSettings.usage_ping_enabled? &&
- Gitlab::CurrentSettings.self_monitoring_project_id.present?
- end
-
- def tracker
- @tracker ||= SnowplowTracker::Tracker.new(
- SnowplowTracker::AsyncEmitter.new(::ProductAnalytics::Tracker::COLLECTOR_URL, protocol: Gitlab.config.gitlab.protocol),
- SnowplowTracker::Subject.new,
- Gitlab::Tracking::SNOWPLOW_NAMESPACE,
- Gitlab::CurrentSettings.self_monitoring_project_id.to_s
- )
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/tracking/destinations/snowplow.rb b/lib/gitlab/tracking/destinations/snowplow.rb
index 07a53b0892b..5596e9acd30 100644
--- a/lib/gitlab/tracking/destinations/snowplow.rb
+++ b/lib/gitlab/tracking/destinations/snowplow.rb
@@ -16,25 +16,53 @@ module Gitlab
increment_total_events_counter
end
+ def options(group)
+ additional_features = Feature.enabled?(:additional_snowplow_tracking, group, type: :ops)
+ {
+ namespace: Gitlab::Tracking::SNOWPLOW_NAMESPACE,
+ hostname: hostname,
+ cookie_domain: cookie_domain,
+ app_id: app_id,
+ form_tracking: additional_features,
+ link_click_tracking: additional_features
+ }.transform_keys! { |key| key.to_s.camelize(:lower).to_sym }
+ end
+
+ def hostname
+ Gitlab::CurrentSettings.snowplow_collector_hostname
+ end
+
private
def enabled?
Gitlab::Tracking.enabled?
end
+ def app_id
+ Gitlab::CurrentSettings.snowplow_app_id
+ end
+
+ def protocol
+ 'https'
+ end
+
+ def cookie_domain
+ Gitlab::CurrentSettings.snowplow_cookie_domain
+ end
+
def tracker
@tracker ||= SnowplowTracker::Tracker.new(
emitter,
SnowplowTracker::Subject.new,
Gitlab::Tracking::SNOWPLOW_NAMESPACE,
- Gitlab::CurrentSettings.snowplow_app_id
+ app_id
)
end
def emitter
SnowplowTracker::AsyncEmitter.new(
- Gitlab::CurrentSettings.snowplow_collector_hostname,
- protocol: 'https',
+ hostname,
+ protocol: protocol,
on_success: method(:increment_successful_events_emissions),
on_failure: method(:failure_callback)
)
@@ -68,8 +96,6 @@ module Gitlab
end
def log_failures(failures)
- hostname = Gitlab::CurrentSettings.snowplow_collector_hostname
-
failures.each do |failure|
Gitlab::AppLogger.error("#{failure["se_ca"]} #{failure["se_ac"]} failed to be reported to collector at #{hostname}")
end
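The options hash is still camelized for the frontend tracker; a small sketch of what the transform_keys! call produces, using hypothetical setting values:

    { namespace: 'gl', hostname: 'snowplow.example.com', cookie_domain: '.example.com',
      app_id: 'gitlab', form_tracking: true, link_click_tracking: true }
      .transform_keys! { |key| key.to_s.camelize(:lower).to_sym }
    # => { namespace: 'gl', hostname: 'snowplow.example.com', cookieDomain: '.example.com',
    #      appId: 'gitlab', formTracking: true, linkClickTracking: true }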
diff --git a/lib/gitlab/tracking/destinations/snowplow_micro.rb b/lib/gitlab/tracking/destinations/snowplow_micro.rb
new file mode 100644
index 00000000000..b818d349a6d
--- /dev/null
+++ b/lib/gitlab/tracking/destinations/snowplow_micro.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+#
+module Gitlab
+ module Tracking
+ module Destinations
+ class SnowplowMicro < Snowplow
+ include ::Gitlab::Utils::StrongMemoize
+ extend ::Gitlab::Utils::Override
+
+ DEFAULT_URI = 'http://localhost:9090'
+
+ override :options
+ def options(group)
+ super.update(
+ protocol: uri.scheme,
+ port: uri.port,
+ force_secure_tracker: false
+ )
+ end
+
+ override :hostname
+ def hostname
+ "#{uri.host}:#{uri.port}"
+ end
+
+ private
+
+ def uri
+ strong_memoize(:snowplow_uri) do
+ uri = URI(ENV['SNOWPLOW_MICRO_URI'] || DEFAULT_URI)
+ uri = URI("http://#{ENV['SNOWPLOW_MICRO_URI']}") unless %w[http https].include?(uri.scheme)
+ uri
+ end
+ end
+
+ override :cookie_domain
+ def cookie_domain
+ '.gitlab.com'
+ end
+
+ override :protocol
+ def protocol
+ uri.scheme
+ end
+ end
+ end
+ end
+end
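The private uri helper accepts either a full URI or a bare host:port in SNOWPLOW_MICRO_URI. A sketch of the fallback with a hypothetical value:

    uri = URI('localhost:9091')           # scheme parses as "localhost", not http/https
    uri = URI('http://localhost:9091')    # so the value is re-parsed with an explicit scheme
    "#{uri.host}:#{uri.port}"             # => "localhost:9091", which is what #hostname returns
    # With the variable unset, DEFAULT_URI gives hostname "localhost:9090" and protocol "http".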
diff --git a/lib/gitlab/tracking/standard_context.rb b/lib/gitlab/tracking/standard_context.rb
index df62e8bbbe6..837390b91fb 100644
--- a/lib/gitlab/tracking/standard_context.rb
+++ b/lib/gitlab/tracking/standard_context.rb
@@ -43,15 +43,8 @@ module Gitlab
environment: environment,
source: source,
plan: plan,
- extra: extra
- }.merge(project_and_namespace)
- .merge(user_data)
- end
-
- def project_and_namespace
- return {} unless ::Feature.enabled?(:add_namespace_and_project_to_snowplow_tracking, default_enabled: :yaml)
-
- {
+ extra: extra,
+ user_id: user&.id,
namespace_id: namespace&.id,
project_id: project_id
}
@@ -60,10 +53,6 @@ module Gitlab
def project_id
project.is_a?(Integer) ? project : project&.id
end
-
- def user_data
- ::Feature.enabled?(:add_actor_based_user_to_snowplow_tracking, user) ? { user_id: user&.id } : {}
- end
end
end
end
diff --git a/lib/gitlab/url_blocker.rb b/lib/gitlab/url_blocker.rb
index 10822f943b6..2c5d76ba41d 100644
--- a/lib/gitlab/url_blocker.rb
+++ b/lib/gitlab/url_blocker.rb
@@ -164,15 +164,21 @@ module Gitlab
end
def parse_url(url)
- raise Addressable::URI::InvalidURIError if multiline?(url)
-
- Addressable::URI.parse(url)
+ Addressable::URI.parse(url).tap do |parsed_url|
+ raise Addressable::URI::InvalidURIError if multiline_blocked?(parsed_url)
+ end
rescue Addressable::URI::InvalidURIError, URI::InvalidURIError
raise BlockedUrlError, 'URI is invalid'
end
- def multiline?(url)
- CGI.unescape(url.to_s) =~ /\n|\r/
+ def multiline_blocked?(parsed_url)
+ url = parsed_url.to_s
+
+ return true if url =~ /\n|\r/
+ # Google Cloud Storage uses a multi-line, encoded Signature query string
+ return false if %w(http https).include?(parsed_url.scheme&.downcase)
+
+ CGI.unescape(url) =~ /\n|\r/
end
def validate_port(port, ports)
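The reworked check still rejects raw line breaks in any URL, but for http(s) URLs it no longer unescapes the string first, so percent-encoded newlines in signed query strings (as Google Cloud Storage produces) are allowed through. A hedged sketch with hypothetical URLs:

    url = Addressable::URI.parse('https://storage.googleapis.com/bucket/obj?Signature=abc%0Adef')
    url.to_s =~ /\n|\r/                    # => nil: no raw newline in the URL itself
    %w(http https).include?(url.scheme)    # => true, so the unescaped form is not inspected
    # For other schemes the unescaped form is still checked:
    CGI.unescape('ftp://example.com/%0A') =~ /\n|\r/   # => truthy, so the URL is blocked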
diff --git a/lib/gitlab/usage/metric.rb b/lib/gitlab/usage/metric.rb
index 5b1ac189c13..24e044c5740 100644
--- a/lib/gitlab/usage/metric.rb
+++ b/lib/gitlab/usage/metric.rb
@@ -25,6 +25,10 @@ module Gitlab
unflatten_key_path(intrumentation_object.instrumentation)
end
+ def with_suggested_name
+ unflatten_key_path(intrumentation_object.suggested_name)
+ end
+
private
def unflatten_key_path(value)
diff --git a/lib/gitlab/usage/metrics/names_suggestions/generator.rb b/lib/gitlab/usage/metrics/names_suggestions/generator.rb
index b47dc5689d4..6dcbe5f5fe5 100644
--- a/lib/gitlab/usage/metrics/names_suggestions/generator.rb
+++ b/lib/gitlab/usage/metrics/names_suggestions/generator.rb
@@ -10,14 +10,18 @@ module Gitlab
uncached_data.deep_stringify_keys.dig(*key_path.split('.'))
end
- def add_metric(metric, time_frame: 'none')
+ def add_metric(metric, time_frame: 'none', options: {})
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
- metric_class.new(time_frame: time_frame).suggested_name
+ metric_class.new(time_frame: time_frame, options: options).suggested_name
end
private
+ def instrumentation_metrics
+ ::Gitlab::UsageDataMetrics.suggested_names
+ end
+
def count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil)
Gitlab::Usage::Metrics::NameSuggestion.for(:count, column: column, relation: relation)
end
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index dd66f9133bb..20e526aeefa 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -45,23 +45,10 @@ module Gitlab
clear_memoized
with_finished_at(:recording_ce_finished_at) do
- license_usage_data
- .merge(system_usage_data_license)
- .merge(system_usage_data_settings)
- .merge(system_usage_data)
- .merge(system_usage_data_monthly)
- .merge(system_usage_data_weekly)
- .merge(features_usage_data)
- .merge(components_usage_data)
- .merge(object_store_usage_data)
- .merge(topology_usage_data)
- .merge(usage_activity_by_stage)
- .merge(usage_activity_by_stage(:usage_activity_by_stage_monthly, monthly_time_range_db_params))
- .merge(analytics_unique_visits_data)
- .merge(compliance_unique_visits_data)
- .merge(search_unique_visits_data)
- .merge(redis_hll_counters)
- .deep_merge(aggregated_metrics_data)
+ usage_data = usage_data_metrics
+ usage_data = usage_data.with_indifferent_access.deep_merge(instrumentation_metrics.with_indifferent_access) if Feature.enabled?(:usage_data_instrumentation)
+
+ usage_data
end
end
@@ -309,9 +296,11 @@ module Gitlab
version: alt_usage_data(fallback: nil) { Gitlab::CurrentSettings.container_registry_version }
},
database: {
- adapter: alt_usage_data { Gitlab::Database.main.adapter_name },
- version: alt_usage_data { Gitlab::Database.main.version },
- pg_system_id: alt_usage_data { Gitlab::Database.main.system_id }
+ # rubocop: disable UsageData/LargeTable
+ adapter: alt_usage_data { ApplicationRecord.database.adapter_name },
+ version: alt_usage_data { ApplicationRecord.database.version },
+ pg_system_id: alt_usage_data { ApplicationRecord.database.system_id }
+ # rubocop: enable UsageData/LargeTable
},
mail: {
smtp_server: alt_usage_data { ActionMailer::Base.smtp_settings[:address] }
@@ -549,7 +538,8 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def usage_activity_by_stage_manage(time_period)
{
- events: distinct_count(::Event.where(time_period), :author_id),
+ # rubocop: disable UsageData/LargeTable
+ events: stage_manage_events(time_period),
groups: distinct_count(::GroupMember.where(time_period), :user_id),
users_created: count(::User.where(time_period), start: minimum_id(User), finish: maximum_id(User)),
omniauth_providers: filtered_omniauth_provider_names.reject { |name| name == 'group_saml' },
@@ -628,9 +618,9 @@ module Gitlab
todos: distinct_count(::Todo.where(time_period), :author_id),
service_desk_enabled_projects: distinct_count_service_desk_enabled_projects(time_period),
service_desk_issues: count(::Issue.service_desk.where(time_period)),
- projects_jira_active: distinct_count(::Project.with_active_integration(::Integrations::Jira) .where(time_period), :creator_id),
- projects_jira_dvcs_cloud_active: distinct_count(::Project.with_active_integration(::Integrations::Jira) .with_jira_dvcs_cloud.where(time_period), :creator_id),
- projects_jira_dvcs_server_active: distinct_count(::Project.with_active_integration(::Integrations::Jira) .with_jira_dvcs_server.where(time_period), :creator_id)
+ projects_jira_active: distinct_count(::Project.with_active_integration(::Integrations::Jira).where(time_period), :creator_id),
+ projects_jira_dvcs_cloud_active: distinct_count(::Project.with_active_integration(::Integrations::Jira).with_jira_dvcs_cloud.where(time_period), :creator_id),
+ projects_jira_dvcs_server_active: distinct_count(::Project.with_active_integration(::Integrations::Jira).with_jira_dvcs_server.where(time_period), :creator_id)
}
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -729,6 +719,44 @@ module Gitlab
private
+ def stage_manage_events(time_period)
+ if time_period.empty?
+ Gitlab::Utils::UsageData::FALLBACK
+ else
+ # rubocop: disable CodeReuse/ActiveRecord
+ # rubocop: disable UsageData/LargeTable
+ start = ::Event.where(time_period).select(:id).order(created_at: :asc).first&.id
+ finish = ::Event.where(time_period).select(:id).order(created_at: :desc).first&.id
+ estimate_batch_distinct_count(::Event.where(time_period), :author_id, start: start, finish: finish)
+ # rubocop: enable UsageData/LargeTable
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+
+ def usage_data_metrics
+ license_usage_data
+ .merge(system_usage_data_license)
+ .merge(system_usage_data_settings)
+ .merge(system_usage_data)
+ .merge(system_usage_data_monthly)
+ .merge(system_usage_data_weekly)
+ .merge(features_usage_data)
+ .merge(components_usage_data)
+ .merge(object_store_usage_data)
+ .merge(topology_usage_data)
+ .merge(usage_activity_by_stage)
+ .merge(usage_activity_by_stage(:usage_activity_by_stage_monthly, monthly_time_range_db_params))
+ .merge(analytics_unique_visits_data)
+ .merge(compliance_unique_visits_data)
+ .merge(search_unique_visits_data)
+ .merge(redis_hll_counters)
+ .deep_merge(aggregated_metrics_data)
+ end
+
+ def instrumentation_metrics
+ Gitlab::UsageDataMetrics.uncached_data # rubocop:disable UsageData/LargeTable
+ end
+
def metric_time_period(time_period)
time_period.present? ? '28d' : 'none'
end
@@ -805,7 +833,13 @@ module Gitlab
Users::InProductMarketingEmail.tracks.keys.each_with_object({}) do |track, result|
# rubocop: enable UsageData/LargeTable:
- series_amount = Namespaces::InProductMarketingEmailsService::TRACKS[track.to_sym][:interval_days].count
+ series_amount =
+ if track.to_sym == Namespaces::InviteTeamEmailService::TRACK
+ 0
+ else
+ Namespaces::InProductMarketingEmailsService::TRACKS[track.to_sym][:interval_days].count
+ end
+
0.upto(series_amount - 1).map do |series|
# When there is an error with the query and it's not the Hash we expect, we return what we got from `count`.
sent_count = sent_emails.is_a?(Hash) ? sent_emails.fetch([track, series], 0) : sent_emails
@@ -881,7 +915,30 @@ module Gitlab
end
def projects_imported_count(from, time_period)
- count(::Project.imported_from(from).where(time_period).where.not(import_type: nil)) # rubocop: disable CodeReuse/ActiveRecord
+ # rubocop:disable CodeReuse/ActiveRecord
+ relation = ::Project.imported_from(from).where.not(import_type: nil) # rubocop:disable UsageData/LargeTable
+ if time_period.empty?
+ count(relation)
+ else
+ @project_import_id ||= {}
+ start = time_period[:created_at].first
+ finish = time_period[:created_at].last
+
+ # These can be nil if no records fall in our range, and the same instance may be
+ # called with different time periods since the period is passed in as a variable
+ unless @project_import_id.key?(start)
+ @project_import_id[start] = ::Project.select(:id).where(Project.arel_table[:created_at].gteq(start)) # rubocop:disable UsageData/LargeTable
+ .order(created_at: :asc).limit(1).first&.id
+ end
+
+ unless @project_import_id.key?(finish)
+ @project_import_id[finish] = ::Project.select(:id).where(Project.arel_table[:created_at].lteq(finish)) # rubocop:disable UsageData/LargeTable
+ .order(created_at: :desc).limit(1).first&.id
+ end
+
+ count(relation, start: @project_import_id[start], finish: @project_import_id[finish])
+ end
+ # rubocop:enable CodeReuse/ActiveRecord
end
def issue_imports(time_period)
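Both stage_manage_events and projects_imported_count above follow the same pattern: pin the counter to the lowest and highest ids inside the requested window so the count never scans outside it. The general shape, as a sketch with a hypothetical time period:

    time_period = { created_at: 30.days.ago..2.days.ago }
    scope  = ::Event.where(time_period)
    start  = scope.select(:id).order(created_at: :asc).first&.id    # lowest id in the window
    finish = scope.select(:id).order(created_at: :desc).first&.id   # highest id in the window
    estimate_batch_distinct_count(scope, :author_id, start: start, finish: finish)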
diff --git a/lib/gitlab/usage_data_counters/known_events/ci_templates.yml b/lib/gitlab/usage_data_counters/known_events/ci_templates.yml
index 99bdd3ca9e9..40922433635 100644
--- a/lib/gitlab/usage_data_counters/known_events/ci_templates.yml
+++ b/lib/gitlab/usage_data_counters/known_events/ci_templates.yml
@@ -83,6 +83,10 @@
category: ci_templates
redis_slot: ci_templates
aggregation: weekly
+- name: p_ci_templates_security_sast_iac_latest
+ category: ci_templates
+ redis_slot: ci_templates
+ aggregation: weekly
- name: p_ci_templates_security_dast_runner_validation
category: ci_templates
redis_slot: ci_templates
@@ -267,6 +271,10 @@
category: ci_templates
redis_slot: ci_templates
aggregation: weekly
+- name: p_ci_templates_jobs_sast_iac_latest
+ category: ci_templates
+ redis_slot: ci_templates
+ aggregation: weekly
- name: p_ci_templates_jobs_secret_detection
category: ci_templates
redis_slot: ci_templates
@@ -447,6 +455,10 @@
category: ci_templates
redis_slot: ci_templates
aggregation: weekly
+- name: p_ci_templates_implicit_jobs_sast_iac_latest
+ category: ci_templates
+ redis_slot: ci_templates
+ aggregation: weekly
- name: p_ci_templates_implicit_jobs_secret_detection
category: ci_templates
redis_slot: ci_templates
@@ -503,6 +515,10 @@
category: ci_templates
redis_slot: ci_templates
aggregation: weekly
+- name: p_ci_templates_implicit_security_sast_iac_latest
+ category: ci_templates
+ redis_slot: ci_templates
+ aggregation: weekly
- name: p_ci_templates_implicit_security_dast_runner_validation
category: ci_templates
redis_slot: ci_templates
@@ -559,3 +575,7 @@
category: ci_templates
redis_slot: ci_templates
aggregation: weekly
+- name: p_ci_templates_kaniko
+ category: ci_templates
+ redis_slot: ci_templates
+ aggregation: weekly
diff --git a/lib/gitlab/usage_data_counters/known_events/code_review_events.yml b/lib/gitlab/usage_data_counters/known_events/code_review_events.yml
index d4a818f8fe0..d4bc060abf9 100644
--- a/lib/gitlab/usage_data_counters/known_events/code_review_events.yml
+++ b/lib/gitlab/usage_data_counters/known_events/code_review_events.yml
@@ -249,3 +249,27 @@
redis_slot: code_review
category: code_review
aggregation: weekly
+- name: i_code_review_widget_nothing_merge_click_new_file
+ redis_slot: code_review
+ category: code_review
+ aggregation: weekly
+- name: i_code_review_post_merge_delete_branch
+ redis_slot: code_review
+ category: code_review
+ aggregation: weekly
+- name: i_code_review_post_merge_click_revert
+ redis_slot: code_review
+ category: code_review
+ aggregation: weekly
+- name: i_code_review_post_merge_click_cherry_pick
+ redis_slot: code_review
+ category: code_review
+ aggregation: weekly
+- name: i_code_review_post_merge_submit_revert_modal
+ redis_slot: code_review
+ category: code_review
+ aggregation: weekly
+- name: i_code_review_post_merge_submit_cherry_pick_modal
+ redis_slot: code_review
+ category: code_review
+ aggregation: weekly
diff --git a/lib/gitlab/usage_data_counters/known_events/quickactions.yml b/lib/gitlab/usage_data_counters/known_events/quickactions.yml
index 7f77fa8ee02..dff2c4f8d03 100644
--- a/lib/gitlab/usage_data_counters/known_events/quickactions.yml
+++ b/lib/gitlab/usage_data_counters/known_events/quickactions.yml
@@ -119,6 +119,10 @@
category: quickactions
redis_slot: quickactions
aggregation: weekly
+- name: i_quickactions_promote_to_incident
+ category: quickactions
+ redis_slot: quickactions
+ aggregation: weekly
- name: i_quickactions_publish
category: quickactions
redis_slot: quickactions
diff --git a/lib/gitlab/usage_data_counters/vs_code_extension_activity_unique_counter.rb b/lib/gitlab/usage_data_counters/vscode_extension_activity_unique_counter.rb
index 703c4885b04..703c4885b04 100644
--- a/lib/gitlab/usage_data_counters/vs_code_extension_activity_unique_counter.rb
+++ b/lib/gitlab/usage_data_counters/vscode_extension_activity_unique_counter.rb
diff --git a/lib/gitlab/usage_data_metrics.rb b/lib/gitlab/usage_data_metrics.rb
index 1ef201121d9..48f695d5db1 100644
--- a/lib/gitlab/usage_data_metrics.rb
+++ b/lib/gitlab/usage_data_metrics.rb
@@ -5,7 +5,17 @@ module Gitlab
class << self
# Build the Usage Ping JSON payload from metrics YAML definitions which have instrumentation class set
def uncached_data
- ::Gitlab::Usage::Metric.all.map(&:with_value).reduce({}, :deep_merge)
+ build_payload(:with_value)
+ end
+
+ def suggested_names
+ build_payload(:with_suggested_name)
+ end
+
+ private
+
+ def build_payload(method_symbol)
+ ::Gitlab::Usage::Metric.all.map(&method_symbol).reduce({}, :deep_merge)
end
end
end
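build_payload folds the per-metric hashes together; because each metric carries its full key path, reduce(:deep_merge) rebuilds the nested payload. A sketch with hypothetical metric hashes:

    [
      { counts: { boards: 14 } },
      { counts: { issues: 120 } },
      { usage_activity_by_stage: { plan: { issues: 8 } } }
    ].reduce({}, :deep_merge)
    # => { counts: { boards: 14, issues: 120 },
    #      usage_activity_by_stage: { plan: { issues: 8 } } }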
diff --git a/lib/gitlab/usage_data_non_sql_metrics.rb b/lib/gitlab/usage_data_non_sql_metrics.rb
index 1ff4588d091..be5a571fb82 100644
--- a/lib/gitlab/usage_data_non_sql_metrics.rb
+++ b/lib/gitlab/usage_data_non_sql_metrics.rb
@@ -6,13 +6,16 @@ module Gitlab
class << self
def uncached_data
- super.with_indifferent_access.deep_merge(instrumentation_metrics_queries.with_indifferent_access)
+ # instrumentation_metrics is already included with feature flag enabled
+ return super if Feature.enabled?(:usage_data_instrumentation)
+
+ super.with_indifferent_access.deep_merge(instrumentation_metrics.with_indifferent_access)
end
- def add_metric(metric, time_frame: 'none')
+ def add_metric(metric, time_frame: 'none', options: {})
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
- metric_class.new(time_frame: time_frame).instrumentation
+ metric_class.new(time_frame: time_frame, options: options).instrumentation
end
def count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil)
@@ -50,7 +53,7 @@ module Gitlab
private
- def instrumentation_metrics_queries
+ def instrumentation_metrics
::Gitlab::Usage::Metric.all.map(&:with_instrumentation).reduce({}, :deep_merge)
end
end
diff --git a/lib/gitlab/usage_data_queries.rb b/lib/gitlab/usage_data_queries.rb
index f64da2fbf13..f543b29e43f 100644
--- a/lib/gitlab/usage_data_queries.rb
+++ b/lib/gitlab/usage_data_queries.rb
@@ -6,13 +6,16 @@ module Gitlab
class UsageDataQueries < UsageData
class << self
def uncached_data
- super.with_indifferent_access.deep_merge(instrumentation_metrics_queries.with_indifferent_access)
+ # instrumentation_metrics is already included with feature flag enabled
+ return super if Feature.enabled?(:usage_data_instrumentation)
+
+ super.with_indifferent_access.deep_merge(instrumentation_metrics.with_indifferent_access)
end
- def add_metric(metric, time_frame: 'none')
+ def add_metric(metric, time_frame: 'none', options: {})
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
- metric_class.new(time_frame: time_frame).instrumentation
+ metric_class.new(time_frame: time_frame, options: options).instrumentation
end
def count(relation, column = nil, *args, **kwargs)
@@ -71,7 +74,7 @@ module Gitlab
private
- def instrumentation_metrics_queries
+ def instrumentation_metrics
::Gitlab::Usage::Metric.all.map(&:with_instrumentation).reduce({}, :deep_merge)
end
end
diff --git a/lib/gitlab/utils/usage_data.rb b/lib/gitlab/utils/usage_data.rb
index d46210f6efe..77f04929661 100644
--- a/lib/gitlab/utils/usage_data.rb
+++ b/lib/gitlab/utils/usage_data.rb
@@ -43,11 +43,16 @@ module Gitlab
HISTOGRAM_FALLBACK = { '-1' => -1 }.freeze
DISTRIBUTED_HLL_FALLBACK = -2
MAX_BUCKET_SIZE = 100
+ INSTRUMENTATION_CLASS_FALLBACK = -100
+
+ def add_metric(metric, time_frame: 'none', options: {})
+ # Results of this method should be overwritten by instrumentation class values
+ # -100 indicates the metric was not properly merged.
+ return INSTRUMENTATION_CLASS_FALLBACK if Feature.enabled?(:usage_data_instrumentation)
- def add_metric(metric, time_frame: 'none')
metric_class = "Gitlab::Usage::Metrics::Instrumentations::#{metric}".constantize
- metric_class.new(time_frame: time_frame).value
+ metric_class.new(time_frame: time_frame, options: options).value
end
def count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil)
diff --git a/lib/gitlab/webpack/file_loader.rb b/lib/gitlab/webpack/file_loader.rb
new file mode 100644
index 00000000000..35ecb1eb4ed
--- /dev/null
+++ b/lib/gitlab/webpack/file_loader.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'net/http'
+require 'uri'
+
+module Gitlab
+ module Webpack
+ class FileLoader
+ class BaseError < StandardError
+ attr_reader :original_error, :uri
+
+ def initialize(uri, orig)
+ super orig.message
+ @uri = uri.to_s
+ @original_error = orig
+ end
+ end
+
+ StaticLoadError = Class.new(BaseError)
+ DevServerLoadError = Class.new(BaseError)
+ DevServerSSLError = Class.new(BaseError)
+
+ def self.load(path)
+ if Gitlab.config.webpack.dev_server.enabled
+ self.load_from_dev_server(path)
+ else
+ self.load_from_static(path)
+ end
+ end
+
+ def self.load_from_dev_server(path)
+ host = Gitlab.config.webpack.dev_server.host
+ port = Gitlab.config.webpack.dev_server.port
+ scheme = Gitlab.config.webpack.dev_server.https ? 'https' : 'http'
+ uri = Addressable::URI.new(scheme: scheme, host: host, port: port, path: self.dev_server_path(path))
+
+ # localhost could be blocked via Gitlab::HTTP
+ response = HTTParty.get(uri.to_s, verify: false) # rubocop:disable Gitlab/HTTParty
+
+ return response.body if response.code == 200
+
+ raise "HTTP error #{response.code}"
+ rescue OpenSSL::SSL::SSLError, EOFError => e
+ raise DevServerSSLError.new(uri, e)
+ rescue StandardError => e
+ raise DevServerLoadError.new(uri, e)
+ end
+
+ def self.load_from_static(path)
+ file_uri = ::Rails.root.join(
+ Gitlab.config.webpack.output_dir,
+ path
+ )
+
+ File.read(file_uri)
+ rescue StandardError => e
+ raise StaticLoadError.new(file_uri, e)
+ end
+
+ def self.dev_server_path(path)
+ "/#{Gitlab.config.webpack.public_path}/#{path}"
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/webpack/graphql_known_operations.rb b/lib/gitlab/webpack/graphql_known_operations.rb
new file mode 100644
index 00000000000..7945513667c
--- /dev/null
+++ b/lib/gitlab/webpack/graphql_known_operations.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Webpack
+ class GraphqlKnownOperations
+ class << self
+ include Gitlab::Utils::StrongMemoize
+
+ def clear_memoization!
+ clear_memoization(:graphql_known_operations)
+ end
+
+ def load
+ strong_memoize(:graphql_known_operations) do
+ data = ::Gitlab::Webpack::FileLoader.load("graphql_known_operations.yml")
+
+ YAML.safe_load(data)
+ rescue StandardError
+ []
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/webpack/manifest.rb b/lib/gitlab/webpack/manifest.rb
index b73c2ebb578..06cddc23134 100644
--- a/lib/gitlab/webpack/manifest.rb
+++ b/lib/gitlab/webpack/manifest.rb
@@ -1,8 +1,5 @@
# frozen_string_literal: true
-require 'net/http'
-require 'uri'
-
module Gitlab
module Webpack
class Manifest
@@ -78,49 +75,16 @@ module Gitlab
end
def load_manifest
- data = if Gitlab.config.webpack.dev_server.enabled
- load_dev_server_manifest
- else
- load_static_manifest
- end
+ data = Gitlab::Webpack::FileLoader.load(Gitlab.config.webpack.manifest_filename)
Gitlab::Json.parse(data)
- end
-
- def load_dev_server_manifest
- host = Gitlab.config.webpack.dev_server.host
- port = Gitlab.config.webpack.dev_server.port
- scheme = Gitlab.config.webpack.dev_server.https ? 'https' : 'http'
- uri = Addressable::URI.new(scheme: scheme, host: host, port: port, path: dev_server_path)
-
- # localhost could be blocked via Gitlab::HTTP
- response = HTTParty.get(uri.to_s, verify: false) # rubocop:disable Gitlab/HTTParty
-
- return response.body if response.code == 200
-
- raise "HTTP error #{response.code}"
- rescue OpenSSL::SSL::SSLError, EOFError => e
+ rescue Gitlab::Webpack::FileLoader::StaticLoadError => e
+ raise ManifestLoadError.new("Could not load compiled manifest from #{e.uri}.\n\nHave you run `rake gitlab:assets:compile`?", e.original_error)
+ rescue Gitlab::Webpack::FileLoader::DevServerSSLError => e
ssl_status = Gitlab.config.webpack.dev_server.https ? ' over SSL' : ''
- raise ManifestLoadError.new("Could not connect to webpack-dev-server at #{uri}#{ssl_status}.\n\nIs SSL enabled? Check that settings in `gitlab.yml` and webpack-dev-server match.", e)
- rescue StandardError => e
- raise ManifestLoadError.new("Could not load manifest from webpack-dev-server at #{uri}.\n\nIs webpack-dev-server running? Try running `gdk status webpack` or `gdk tail webpack`.", e)
- end
-
- def load_static_manifest
- File.read(static_manifest_path)
- rescue StandardError => e
- raise ManifestLoadError.new("Could not load compiled manifest from #{static_manifest_path}.\n\nHave you run `rake gitlab:assets:compile`?", e)
- end
-
- def static_manifest_path
- ::Rails.root.join(
- Gitlab.config.webpack.output_dir,
- Gitlab.config.webpack.manifest_filename
- )
- end
-
- def dev_server_path
- "/#{Gitlab.config.webpack.public_path}/#{Gitlab.config.webpack.manifest_filename}"
+ raise ManifestLoadError.new("Could not connect to webpack-dev-server at #{e.uri}#{ssl_status}.\n\nIs SSL enabled? Check that settings in `gitlab.yml` and webpack-dev-server match.", e.original_error)
+ rescue Gitlab::Webpack::FileLoader::DevServerLoadError => e
+ raise ManifestLoadError.new("Could not load manifest from webpack-dev-server at #{e.uri}.\n\nIs webpack-dev-server running? Try running `gdk status webpack` or `gdk tail webpack`.", e.original_error)
end
end
end
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index c40aa2273aa..3a905a2e1c5 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -8,6 +8,7 @@ require 'uri'
module Gitlab
class Workhorse
SEND_DATA_HEADER = 'Gitlab-Workhorse-Send-Data'
+ SEND_DEPENDENCY_CONTENT_TYPE_HEADER = 'Workhorse-Proxy-Content-Type'
VERSION_FILE = 'GITLAB_WORKHORSE_VERSION'
INTERNAL_API_CONTENT_TYPE = 'application/vnd.gitlab-workhorse+json'
INTERNAL_API_REQUEST_HEADER = 'Gitlab-Workhorse-Api-Request'
@@ -170,9 +171,9 @@ module Gitlab
]
end
- def send_dependency(token, url)
+ def send_dependency(headers, url)
params = {
- 'Header' => { Authorization: ["Bearer #{token}"] },
+ 'Header' => headers,
'Url' => url
}
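Callers now build the header hash themselves instead of passing a bare token, which leaves room for headers beyond Authorization. A hypothetical call site reproducing the old single-token behaviour:

    headers = { Authorization: ["Bearer #{token}"] }   # token and upstream_url are hypothetical
    Gitlab::Workhorse.send_dependency(headers, upstream_url)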
diff --git a/lib/gitlab/x509/certificate.rb b/lib/gitlab/x509/certificate.rb
index c7289a51b49..752f3c6b004 100644
--- a/lib/gitlab/x509/certificate.rb
+++ b/lib/gitlab/x509/certificate.rb
@@ -19,6 +19,10 @@ module Gitlab
ca_certs.map(&:to_pem).join('\n') unless ca_certs.blank?
end
+ class << self
+ include ::Gitlab::Utils::StrongMemoize
+ end
+
def self.from_strings(key_string, cert_string, ca_certs_string = nil)
key = OpenSSL::PKey::RSA.new(key_string)
cert = OpenSSL::X509::Certificate.new(cert_string)
@@ -33,6 +37,30 @@ module Gitlab
from_strings(File.read(key_path), File.read(cert_path), ca_certs_string)
end
+ # Returns all top-level, readable files in the default CA cert directory
+ def self.ca_certs_paths
+ cert_paths = Dir["#{OpenSSL::X509::DEFAULT_CERT_DIR}/*"].select do |path|
+ !File.directory?(path) && File.readable?(path)
+ end
+ cert_paths << OpenSSL::X509::DEFAULT_CERT_FILE if File.exist? OpenSSL::X509::DEFAULT_CERT_FILE
+ cert_paths
+ end
+
+ # Returns a concatenated array of Strings, each being a PEM-coded CA certificate.
+ def self.ca_certs_bundle
+ strong_memoize(:ca_certs_bundle) do
+ ca_certs_paths.flat_map do |cert_file|
+ load_ca_certs_bundle(File.read(cert_file))
+ rescue OpenSSL::OpenSSLError => e
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e, cert_file: cert_file)
+ end.uniq.join("\n")
+ end
+ end
+
+ def self.reset_ca_certs_bundle
+ clear_memoization(:ca_certs_bundle)
+ end
+
# Returns an array of OpenSSL::X509::Certificate objects, empty array if none found
#
# Ruby OpenSSL::X509::Certificate.new will only load the first
diff --git a/lib/gitlab/zentao/client.rb b/lib/gitlab/zentao/client.rb
index bdfa4b3a308..8acfb4913f3 100644
--- a/lib/gitlab/zentao/client.rb
+++ b/lib/gitlab/zentao/client.rb
@@ -15,10 +15,8 @@ module Gitlab
end
def ping
- response = fetch_product(zentao_product_xid)
-
- active = response.fetch('deleted') == '0' rescue false
-
+ response = fetch_product(zentao_product_xid) rescue {}
+ active = response['deleted'] == '0'
if active
{ success: true }
else
@@ -31,25 +29,30 @@ module Gitlab
end
def fetch_issues(params = {})
- get("products/#{zentao_product_xid}/issues",
- params.reverse_merge(page: 1, limit: 20))
+ get("products/#{zentao_product_xid}/issues", params)
end
def fetch_issue(issue_id)
+ raise Gitlab::Zentao::Client::Error, 'invalid issue id' unless issue_id_pattern.match(issue_id)
+
get("issues/#{issue_id}")
end
private
+ def issue_id_pattern
+ /\A\S+-\d+\z/
+ end
+
def get(path, params = {})
options = { headers: headers, query: params }
response = Gitlab::HTTP.get(url(path), options)
- return {} unless response.success?
+ raise Gitlab::Zentao::Client::Error, 'request error' unless response.success?
Gitlab::Json.parse(response.body)
rescue JSON::ParserError
- {}
+ raise Gitlab::Zentao::Client::Error, 'invalid response format'
end
def url(path)
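fetch_issue now validates the id against issue_id_pattern before making a request. Hypothetical ids, to show what the anchored pattern accepts:

    pattern = /\A\S+-\d+\z/
    pattern.match('story-123')    # => MatchData: accepted
    pattern.match('123')          # => nil: no "<prefix>-" before the number
    pattern.match("story-1\nx")   # => nil: \A..\z anchors reject trailing input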
diff --git a/lib/gitlab/zentao/query.rb b/lib/gitlab/zentao/query.rb
new file mode 100644
index 00000000000..d40ee80939a
--- /dev/null
+++ b/lib/gitlab/zentao/query.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Zentao
+ class Query
+ STATUSES = %w[all opened closed].freeze
+ ISSUES_DEFAULT_LIMIT = 20
+ ISSUES_MAX_LIMIT = 50
+
+ def initialize(integration, params)
+ @client = Client.new(integration)
+ @params = params
+ end
+
+ def issues
+ issues_response = client.fetch_issues(query_options)
+ return [] if issues_response.blank?
+
+ Kaminari.paginate_array(
+ issues_response['issues'],
+ limit: issues_response['limit'],
+ total_count: issues_response['total']
+ )
+ end
+
+ def issue
+ issue_response = client.fetch_issue(params[:id])
+ issue_response['issue']
+ end
+
+ private
+
+ attr_reader :client, :params
+
+ def query_options
+ {
+ order: query_order,
+ status: query_status,
+ labels: query_labels,
+ page: query_page,
+ limit: query_limit,
+ search: query_search
+ }
+ end
+
+ def query_page
+ params[:page].presence || 1
+ end
+
+ def query_limit
+ limit = params[:limit].presence || ISSUES_DEFAULT_LIMIT
+ [limit.to_i, ISSUES_MAX_LIMIT].min
+ end
+
+ def query_search
+ params[:search] || ''
+ end
+
+ def query_order
+ key, order = params['sort'].to_s.split('_', 2)
+ zentao_key = (key == 'created' ? 'openedDate' : 'lastEditedDate')
+ zentao_order = (order == 'asc' ? 'asc' : 'desc')
+
+ "#{zentao_key}_#{zentao_order}"
+ end
+
+ def query_status
+ return params[:state] if params[:state].present? && params[:state].in?(STATUSES)
+
+ 'opened'
+ end
+
+ def query_labels
+ (params[:labels].presence || []).join(',')
+ end
+ end
+ end
+end
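A hypothetical way to drive the new Query object, assuming integration is a configured ZenTao integration record; note that query_order reads the string key 'sort' while the other options use symbol keys, and the limit is clamped to ISSUES_MAX_LIMIT:

    query = Gitlab::Zentao::Query.new(integration, { state: 'opened', page: 2, limit: 100, 'sort' => 'created_asc' })
    query.issues   # fetches page 2 with limit 50, ordered by openedDate_asc
    # Query#issue would look up params[:id] via Client#fetch_issue instead.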