diff options
Diffstat (limited to 'lib/gitlab')
300 files changed, 3226 insertions, 4672 deletions
diff --git a/lib/gitlab/action_cable/request_store_callbacks.rb b/lib/gitlab/action_cable/request_store_callbacks.rb index a9f30b0fc10..14d80a7c40c 100644 --- a/lib/gitlab/action_cable/request_store_callbacks.rb +++ b/lib/gitlab/action_cable/request_store_callbacks.rb @@ -5,8 +5,6 @@ module Gitlab module RequestStoreCallbacks def self.install ::ActionCable::Server::Worker.set_callback :work, :around, &wrapper - ::ActionCable::Channel::Base.set_callback :subscribe, :around, &wrapper - ::ActionCable::Channel::Base.set_callback :unsubscribe, :around, &wrapper end def self.wrapper diff --git a/lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb b/lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb index b00925495f2..1d041e76277 100644 --- a/lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb +++ b/lib/gitlab/analytics/cycle_analytics/aggregated/stage_query_helpers.rb @@ -32,7 +32,7 @@ module Gitlab end def duration_in_seconds(duration_expression = duration) - Arel::Nodes::Extract.new(duration_expression, :epoch) + Arel::Nodes::NamedFunction.new('CAST', [Arel::Nodes::Extract.new(duration_expression, :epoch).as('double precision')]) end end end diff --git a/lib/gitlab/analytics/cycle_analytics/average.rb b/lib/gitlab/analytics/cycle_analytics/average.rb index 7140d31d536..4113e2e5d6a 100644 --- a/lib/gitlab/analytics/cycle_analytics/average.rb +++ b/lib/gitlab/analytics/cycle_analytics/average.rb @@ -41,7 +41,7 @@ module Gitlab end def average_in_seconds - Arel::Nodes::Extract.new(average, :epoch) + Arel::Nodes::NamedFunction.new('CAST', [Arel::Nodes::Extract.new(average, :epoch).as('double precision')]) end end end diff --git a/lib/gitlab/analytics/cycle_analytics/median.rb b/lib/gitlab/analytics/cycle_analytics/median.rb index 5775d0324c6..0958cc39945 100644 --- a/lib/gitlab/analytics/cycle_analytics/median.rb +++ b/lib/gitlab/analytics/cycle_analytics/median.rb @@ -38,7 +38,8 @@ module Gitlab end def 
median_duration_in_seconds - Arel::Nodes::Extract.new(percentile_cont, :epoch) + Arel::Nodes::NamedFunction.new('CAST', + [Arel::Nodes::Extract.new(percentile_cont, :epoch).as('double precision')]) end end end diff --git a/lib/gitlab/analytics/cycle_analytics/request_params.rb b/lib/gitlab/analytics/cycle_analytics/request_params.rb index 3e70d64fea6..2c4b0215307 100644 --- a/lib/gitlab/analytics/cycle_analytics/request_params.rb +++ b/lib/gitlab/analytics/cycle_analytics/request_params.rb @@ -93,6 +93,8 @@ module Gitlab attrs[:stage] = stage_data_attributes.to_json if stage_id.present? attrs[:namespace] = namespace_attributes attrs[:enable_tasks_by_type_chart] = 'false' + attrs[:enable_customizable_stages] = 'false' + attrs[:enable_projects_filter] = 'false' attrs[:default_stages] = Gitlab::Analytics::CycleAnalytics::DefaultStages.all.map do |stage_params| ::Analytics::CycleAnalytics::StagePresenter.new(stage_params) end.to_json @@ -114,7 +116,7 @@ module Gitlab { project_id: project.id, - group_path: project.group&.path, + group_path: project.group ? 
"groups/#{project.group&.full_path}" : nil, request_path: url_helpers.project_cycle_analytics_path(project), full_path: project.full_path } @@ -145,7 +147,8 @@ module Gitlab { name: project.name, - full_path: project.full_path + full_path: project.full_path, + type: namespace.type } end diff --git a/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb b/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb index 5648984ecbb..3416a916e26 100644 --- a/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb +++ b/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb @@ -13,7 +13,9 @@ module Gitlab end def round_duration_to_seconds - Arel::Nodes::NamedFunction.new('ROUND', [Arel::Nodes::Extract.new(duration, :epoch)]) + Arel::Nodes::NamedFunction.new('ROUND', [ + Arel::Nodes::NamedFunction.new('CAST', [Arel::Nodes::Extract.new(duration, :epoch).as('double precision')]) + ]) end def duration diff --git a/lib/gitlab/app_logger.rb b/lib/gitlab/app_logger.rb index 40bdc538594..decc6be3410 100644 --- a/lib/gitlab/app_logger.rb +++ b/lib/gitlab/app_logger.rb @@ -2,18 +2,12 @@ module Gitlab class AppLogger < Gitlab::MultiDestinationLogger - LOGGERS = [Gitlab::AppTextLogger, Gitlab::AppJsonLogger].freeze - def self.loggers - if Gitlab::Utils.to_boolean(ENV.fetch('UNSTRUCTURED_RAILS_LOG', 'false')) - LOGGERS - else - [Gitlab::AppJsonLogger] - end + [Gitlab::AppJsonLogger] end def self.primary_logger - Gitlab::AppTextLogger + Gitlab::AppJsonLogger end end end diff --git a/lib/gitlab/application_context.rb b/lib/gitlab/application_context.rb index 06ce1dbdc77..0ea52b7b7c8 100644 --- a/lib/gitlab/application_context.rb +++ b/lib/gitlab/application_context.rb @@ -25,7 +25,8 @@ module Gitlab :artifact_used_cdn, :artifacts_dependencies_size, :artifacts_dependencies_count, - :root_caller_id + :root_caller_id, + :merge_action_status ].freeze private_constant :KNOWN_KEYS @@ -43,7 +44,8 @@ module Gitlab Attribute.new(:artifact_used_cdn, Object), 
Attribute.new(:artifacts_dependencies_size, Integer), Attribute.new(:artifacts_dependencies_count, Integer), - Attribute.new(:root_caller_id, String) + Attribute.new(:root_caller_id, String), + Attribute.new(:merge_action_status, String) ].freeze def self.known_keys @@ -97,6 +99,7 @@ module Gitlab assign_hash_if_value(hash, :artifact_used_cdn) assign_hash_if_value(hash, :artifacts_dependencies_size) assign_hash_if_value(hash, :artifacts_dependencies_count) + assign_hash_if_value(hash, :merge_action_status) hash[:user] = -> { username } if include_user? hash[:user_id] = -> { user_id } if include_user? diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb index 71629eb701c..5b5f69858d3 100644 --- a/lib/gitlab/application_rate_limiter.rb +++ b/lib/gitlab/application_rate_limiter.rb @@ -56,6 +56,7 @@ module Gitlab namespace_exists: { threshold: 20, interval: 1.minute }, fetch_google_ip_list: { threshold: 10, interval: 1.minute }, project_fork_sync: { threshold: 10, interval: 30.minutes }, + ai_action: { threshold: 20, interval: 1.hour }, jobs_index: { threshold: 600, interval: 1.minute }, bulk_import: { threshold: 6, interval: 1.minute }, projects_api_rate_limit_unauthenticated: { diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb index 06bdb2c1ddc..9268fdd8519 100644 --- a/lib/gitlab/auth.rb +++ b/lib/gitlab/auth.rb @@ -3,7 +3,7 @@ module Gitlab module Auth MissingPersonalAccessTokenError = Class.new(StandardError) - IpBlacklisted = Class.new(StandardError) + IpBlocked = Class.new(StandardError) # Scopes used for GitLab API access API_SCOPE = :api @@ -29,6 +29,12 @@ module Gitlab WRITE_REGISTRY_SCOPE = :write_registry REGISTRY_SCOPES = [READ_REGISTRY_SCOPE, WRITE_REGISTRY_SCOPE].freeze + # Scopes used for GitLab Observability access which is outside of the GitLab app itself. + # Hence the lack of ability mapping in `abilities_for_scopes`. 
+ READ_OBSERVABILITY_SCOPE = :read_observability + WRITE_OBSERVABILITY_SCOPE = :write_observability + OBSERVABILITY_SCOPES = [READ_OBSERVABILITY_SCOPE, WRITE_OBSERVABILITY_SCOPE].freeze + # Scopes used for GitLab as admin SUDO_SCOPE = :sudo ADMIN_MODE_SCOPE = :admin_mode @@ -51,7 +57,7 @@ module Gitlab rate_limiter = Gitlab::Auth::IpRateLimiter.new(ip) - raise IpBlacklisted if !skip_rate_limit?(login: login) && rate_limiter.banned? + raise IpBlocked if !skip_rate_limit?(login: login) && rate_limiter.banned? # `user_with_password_for_git` should be the last check # because it's the most expensive, especially when LDAP @@ -364,14 +370,8 @@ module Gitlab ] end - def available_scopes_for(current_user) - scopes = non_admin_available_scopes - - if current_user.admin? # rubocop: disable Cop/UserAdmin - scopes += Feature.enabled?(:admin_mode_for_api) ? ADMIN_SCOPES : [SUDO_SCOPE] - end - - scopes + def available_scopes_for(resource) + available_scopes_for_resource(resource) - unavailable_scopes_for_resource(resource) end def all_available_scopes @@ -390,13 +390,40 @@ module Gitlab end def resource_bot_scopes - Gitlab::Auth::API_SCOPES + Gitlab::Auth::REPOSITORY_SCOPES + Gitlab::Auth.registry_scopes - [:read_user] + non_admin_available_scopes - [READ_USER_SCOPE] end private + def available_scopes_for_resource(resource) + case resource + when User + scopes = non_admin_available_scopes + + if resource.admin? # rubocop: disable Cop/UserAdmin + scopes += Feature.enabled?(:admin_mode_for_api) ? 
ADMIN_SCOPES : [SUDO_SCOPE] + end + + scopes + when Project, Group + resource_bot_scopes + else + [] + end + end + + def unavailable_scopes_for_resource(resource) + unavailable_observability_scopes_for_resource(resource) + end + + def unavailable_observability_scopes_for_resource(resource) + return [] if resource.is_a?(Group) && Gitlab::Observability.enabled?(resource) + + OBSERVABILITY_SCOPES + end + def non_admin_available_scopes - API_SCOPES + REPOSITORY_SCOPES + registry_scopes + API_SCOPES + REPOSITORY_SCOPES + registry_scopes + OBSERVABILITY_SCOPES end def find_build_by_token(token) diff --git a/lib/gitlab/auth/o_auth/auth_hash.rb b/lib/gitlab/auth/o_auth/auth_hash.rb index 82a5aad360c..d1eede65f0c 100644 --- a/lib/gitlab/auth/o_auth/auth_hash.rb +++ b/lib/gitlab/auth/o_auth/auth_hash.rb @@ -76,7 +76,13 @@ module Gitlab end def get_from_auth_hash_or_info(key) - coerce_utf8(auth_hash[key]) || get_info(key) + if auth_hash.key?(key) + coerce_utf8(auth_hash[key]) + elsif auth_hash.key?(:extra) && auth_hash.extra.key?(:raw_info) && !auth_hash.extra.raw_info[key].nil? 
+ coerce_utf8(auth_hash.extra.raw_info[key]) + else + get_info(key) + end end # Allow for configuring a custom username claim per provider from diff --git a/lib/gitlab/auth/u2f_webauthn_converter.rb b/lib/gitlab/auth/u2f_webauthn_converter.rb deleted file mode 100644 index 20b5d2ddc88..00000000000 --- a/lib/gitlab/auth/u2f_webauthn_converter.rb +++ /dev/null @@ -1,40 +0,0 @@ -# frozen_string_literal: true - -require 'webauthn/u2f_migrator' - -module Gitlab - module Auth - class U2fWebauthnConverter - def initialize(u2f_registration) - @u2f_registration = u2f_registration - end - - def convert - now = Time.current - - converted_credential = WebAuthn::U2fMigrator.new( - app_id: Gitlab.config.gitlab.url, - certificate: u2f_registration.certificate, - key_handle: u2f_registration.key_handle, - public_key: u2f_registration.public_key, - counter: u2f_registration.counter - ).credential - - { - credential_xid: Base64.strict_encode64(converted_credential.id), - public_key: Base64.strict_encode64(converted_credential.public_key), - counter: u2f_registration.counter || 0, - name: u2f_registration.name || '', - user_id: u2f_registration.user_id, - u2f_registration_id: u2f_registration.id, - created_at: now, - updated_at: now - } - end - - private - - attr_reader :u2f_registration - end - end -end diff --git a/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens.rb b/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens.rb index 6f5ddec628d..2127ce5975d 100644 --- a/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens.rb +++ b/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens.rb @@ -19,7 +19,11 @@ module Gitlab def perform each_sub_batch do |sub_batch| sub_batch.each do |token| - token.update!(scopes: (YAML.safe_load(token.scopes) + ADMIN_MODE_SCOPE).uniq.to_yaml) + existing_scopes = YAML.safe_load(token.scopes, permitted_classes: [Symbol]) + # 
making sure scopes are not mixed symbols and strings + stringified_scopes = existing_scopes.map(&:to_s) + + token.update!(scopes: (stringified_scopes + ADMIN_MODE_SCOPE).uniq.to_yaml) end end end diff --git a/lib/gitlab/background_migration/backfill_ci_queuing_tables.rb b/lib/gitlab/background_migration/backfill_ci_queuing_tables.rb deleted file mode 100644 index 63112b52584..00000000000 --- a/lib/gitlab/background_migration/backfill_ci_queuing_tables.rb +++ /dev/null @@ -1,153 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Ensure queuing entries are present even if admins skip upgrades. - class BackfillCiQueuingTables - class Namespace < ActiveRecord::Base # rubocop:disable Style/Documentation - self.table_name = 'namespaces' - self.inheritance_column = :_type_disabled - end - - class Project < ActiveRecord::Base # rubocop:disable Style/Documentation - self.table_name = 'projects' - - belongs_to :namespace - has_one :ci_cd_settings, class_name: 'Gitlab::BackgroundMigration::BackfillCiQueuingTables::ProjectCiCdSetting' - - def group_runners_enabled? - return false unless ci_cd_settings - - ci_cd_settings.group_runners_enabled? 
- end - end - - class ProjectCiCdSetting < ActiveRecord::Base # rubocop:disable Style/Documentation - self.table_name = 'project_ci_cd_settings' - end - - class Taggings < ActiveRecord::Base # rubocop:disable Style/Documentation - self.table_name = 'taggings' - end - - module Ci - class Build < ActiveRecord::Base # rubocop:disable Style/Documentation - include EachBatch - - self.table_name = 'ci_builds' - self.inheritance_column = :_type_disabled - - belongs_to :project - - scope :pending, -> do - where(status: :pending, type: 'Ci::Build', runner_id: nil) - end - - def self.each_batch(of: 1000, column: :id, order: { runner_id: :asc, id: :asc }, order_hint: nil) - start = except(:select).select(column).reorder(order) - start = start.take - return unless start - - start_id = start[column] - arel_table = self.arel_table - - 1.step do |index| - start_cond = arel_table[column].gteq(start_id) - stop = except(:select).select(column).where(start_cond).reorder(order) - stop = stop.offset(of).limit(1).take - relation = where(start_cond) - - if stop - stop_id = stop[column] - start_id = stop_id - stop_cond = arel_table[column].lt(stop_id) - relation = relation.where(stop_cond) - end - - # Any ORDER BYs are useless for this relation and can lead to less - # efficient UPDATE queries, hence we get rid of it. - relation = relation.except(:order) - - # Using unscoped is necessary to prevent leaking the current scope used by - # ActiveRecord to chain `each_batch` method. 
- unscoped { yield relation, index } - - break unless stop - end - end - - def tags_ids - BackfillCiQueuingTables::Taggings - .where(taggable_id: id, taggable_type: 'CommitStatus') - .pluck(:tag_id) - end - end - - class PendingBuild < ActiveRecord::Base # rubocop:disable Style/Documentation - self.table_name = 'ci_pending_builds' - - class << self - def upsert_from_build!(build) - entry = self.new(args_from_build(build)) - - self.upsert( - entry.attributes.compact, - returning: %w[build_id], - unique_by: :build_id) - end - - def args_from_build(build) - project = build.project - - { - build_id: build.id, - project_id: build.project_id, - protected: build.protected?, - namespace_id: project.namespace_id, - tag_ids: build.tags_ids, - instance_runners_enabled: project.shared_runners_enabled?, - namespace_traversal_ids: namespace_traversal_ids(project) - } - end - - def namespace_traversal_ids(project) - if project.group_runners_enabled? - project.namespace.traversal_ids - else - [] - end - end - end - end - end - - BATCH_SIZE = 100 - - def perform(start_id, end_id) - scope = BackfillCiQueuingTables::Ci::Build.pending.where(id: start_id..end_id) - pending_builds_query = BackfillCiQueuingTables::Ci::PendingBuild - .where('ci_builds.id = ci_pending_builds.build_id') - .select(1) - - scope.each_batch(of: BATCH_SIZE) do |builds| - builds = builds.where('NOT EXISTS (?)', pending_builds_query) - builds = builds.includes(:project, project: [:namespace, :ci_cd_settings]) - - builds.each do |build| - BackfillCiQueuingTables::Ci::PendingBuild.upsert_from_build!(build) - end - end - - mark_job_as_succeeded(start_id, end_id) - end - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - self.class.name.demodulize, - arguments) - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_integrations_type_new.rb b/lib/gitlab/background_migration/backfill_integrations_type_new.rb deleted file mode 
100644 index b07d9371c19..00000000000 --- a/lib/gitlab/background_migration/backfill_integrations_type_new.rb +++ /dev/null @@ -1,86 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Backfills the new `integrations.type_new` column, which contains - # the real class name, rather than the legacy class name in `type` - # which is mapped via `Gitlab::Integrations::StiType`. - class BackfillIntegrationsTypeNew - include Gitlab::Database::DynamicModelHelpers - - def perform(start_id, stop_id, batch_table, batch_column, sub_batch_size, pause_ms) - parent_batch_relation = define_batchable_model(batch_table, connection: connection) - .where(batch_column => start_id..stop_id) - - parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size) do |sub_batch| - process_sub_batch(sub_batch) - - sleep(pause_ms * 0.001) if pause_ms > 0 - end - end - - private - - def connection - ApplicationRecord.connection - end - - def process_sub_batch(sub_batch) - # Extract the start/stop IDs from the current sub-batch - sub_start_id, sub_stop_id = sub_batch.pick(Arel.sql('MIN(id), MAX(id)')) - - # This matches the mapping from the INSERT trigger added in - # db/migrate/20210721135638_add_triggers_to_integrations_type_new.rb - connection.execute(<<~SQL) - WITH mapping(old_type, new_type) AS (VALUES - ('AsanaService', 'Integrations::Asana'), - ('AssemblaService', 'Integrations::Assembla'), - ('BambooService', 'Integrations::Bamboo'), - ('BugzillaService', 'Integrations::Bugzilla'), - ('BuildkiteService', 'Integrations::Buildkite'), - ('CampfireService', 'Integrations::Campfire'), - ('ConfluenceService', 'Integrations::Confluence'), - ('CustomIssueTrackerService', 'Integrations::CustomIssueTracker'), - ('DatadogService', 'Integrations::Datadog'), - ('DiscordService', 'Integrations::Discord'), - ('DroneCiService', 'Integrations::DroneCi'), - ('EmailsOnPushService', 'Integrations::EmailsOnPush'), - ('EwmService', 'Integrations::Ewm'), - 
('ExternalWikiService', 'Integrations::ExternalWiki'), - ('FlowdockService', 'Integrations::Flowdock'), - ('HangoutsChatService', 'Integrations::HangoutsChat'), - ('IrkerService', 'Integrations::Irker'), - ('JenkinsService', 'Integrations::Jenkins'), - ('JiraService', 'Integrations::Jira'), - ('MattermostService', 'Integrations::Mattermost'), - ('MattermostSlashCommandsService', 'Integrations::MattermostSlashCommands'), - ('MicrosoftTeamsService', 'Integrations::MicrosoftTeams'), - ('MockCiService', 'Integrations::MockCi'), - ('MockMonitoringService', 'Integrations::MockMonitoring'), - ('PackagistService', 'Integrations::Packagist'), - ('PipelinesEmailService', 'Integrations::PipelinesEmail'), - ('PivotaltrackerService', 'Integrations::Pivotaltracker'), - ('PrometheusService', 'Integrations::Prometheus'), - ('PushoverService', 'Integrations::Pushover'), - ('RedmineService', 'Integrations::Redmine'), - ('SlackService', 'Integrations::Slack'), - ('SlackSlashCommandsService', 'Integrations::SlackSlashCommands'), - ('TeamcityService', 'Integrations::Teamcity'), - ('UnifyCircuitService', 'Integrations::UnifyCircuit'), - ('WebexTeamsService', 'Integrations::WebexTeams'), - ('YoutrackService', 'Integrations::Youtrack'), - - -- EE-only integrations - ('GithubService', 'Integrations::Github'), - ('GitlabSlackApplicationService', 'Integrations::GitlabSlackApplication') - ) - - UPDATE integrations SET type_new = mapping.new_type - FROM mapping - WHERE integrations.id BETWEEN #{sub_start_id} AND #{sub_stop_id} - AND integrations.type = mapping.old_type - SQL - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb b/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb deleted file mode 100644 index 3b8a452b855..00000000000 --- a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb +++ /dev/null @@ -1,76 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module 
BackgroundMigration - # A job to set namespaces.traversal_ids in sub-batches, of all namespaces with - # a parent and not already set. - # rubocop:disable Style/Documentation - class BackfillNamespaceTraversalIdsChildren - class Namespace < ActiveRecord::Base - include ::EachBatch - - self.table_name = 'namespaces' - - scope :base_query, -> { where.not(parent_id: nil) } - end - - PAUSE_SECONDS = 0.1 - - def perform(start_id, end_id, sub_batch_size) - batch_query = Namespace.base_query.where(id: start_id..end_id) - batch_query.each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) - ranged_query = Namespace.unscoped.base_query.where(id: first..last) - - update_sql = <<~SQL - UPDATE namespaces - SET traversal_ids = calculated_ids.traversal_ids - FROM #{calculated_traversal_ids(ranged_query)} calculated_ids - WHERE namespaces.id = calculated_ids.id - AND namespaces.traversal_ids = '{}' - SQL - ApplicationRecord.connection.execute(update_sql) - - sleep PAUSE_SECONDS - end - - # We have to add all arguments when marking a job as succeeded as they - # are all used to track the job by `queue_background_migration_jobs_by_range_at_intervals` - mark_job_as_succeeded(start_id, end_id, sub_batch_size) - end - - private - - # Calculate the ancestor path for a given set of namespaces. 
- def calculated_traversal_ids(batch) - <<~SQL - ( - WITH RECURSIVE cte(source_id, namespace_id, parent_id, height) AS ( - ( - SELECT batch.id, batch.id, batch.parent_id, 1 - FROM (#{batch.to_sql}) AS batch - ) - UNION ALL - ( - SELECT cte.source_id, n.id, n.parent_id, cte.height+1 - FROM namespaces n, cte - WHERE n.id = cte.parent_id - ) - ) - SELECT flat_hierarchy.source_id as id, - array_agg(flat_hierarchy.namespace_id ORDER BY flat_hierarchy.height DESC) as traversal_ids - FROM (SELECT * FROM cte FOR UPDATE) flat_hierarchy - GROUP BY flat_hierarchy.source_id - ) - SQL - end - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - 'BackfillNamespaceTraversalIdsChildren', - arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots.rb b/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots.rb deleted file mode 100644 index c69289fb91f..00000000000 --- a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # A job to set namespaces.traversal_ids in sub-batches, of all namespaces - # without a parent and not already set. 
- # rubocop:disable Style/Documentation - class BackfillNamespaceTraversalIdsRoots - class Namespace < ActiveRecord::Base - include ::EachBatch - - self.table_name = 'namespaces' - - scope :base_query, -> { where(parent_id: nil) } - end - - PAUSE_SECONDS = 0.1 - - def perform(start_id, end_id, sub_batch_size) - ranged_query = Namespace.base_query - .where(id: start_id..end_id) - .where("traversal_ids = '{}'") - - ranged_query.each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) - - # The query need to be reconstructed because .each_batch modifies the default scope - # See: https://gitlab.com/gitlab-org/gitlab/-/issues/330510 - Namespace.unscoped - .base_query - .where(id: first..last) - .where("traversal_ids = '{}'") - .update_all('traversal_ids = ARRAY[id]') - - sleep PAUSE_SECONDS - end - - mark_job_as_succeeded(start_id, end_id, sub_batch_size) - end - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - 'BackfillNamespaceTraversalIdsRoots', - arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_partitioned_table.rb b/lib/gitlab/background_migration/backfill_partitioned_table.rb new file mode 100644 index 00000000000..6479d40a930 --- /dev/null +++ b/lib/gitlab/background_migration/backfill_partitioned_table.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # Background migration to generically copy data from the given table into its corresponding partitioned table + class BackfillPartitionedTable < BatchedMigrationJob + operation_name :upsert_partitioned_table + feature_category :database + job_arguments :partitioned_table + + def perform + validate_paritition_table! 
+ + bulk_copy = Gitlab::Database::PartitioningMigrationHelpers::BulkCopy.new( + batch_table, + partitioned_table, + batch_column, + connection: connection + ) + + each_sub_batch do |relation| + sub_start_id, sub_stop_id = relation.pick(Arel.sql("MIN(#{batch_column}), MAX(#{batch_column})")) + bulk_copy.copy_between(sub_start_id, sub_stop_id) + end + end + + private + + def validate_paritition_table! + unless connection.table_exists?(partitioned_table) + raise "exiting backfill migration because partitioned table #{partitioned_table} does not exist. " \ + "This could be due to rollback of the migration which created the partitioned table." + end + + # rubocop: disable Style/GuardClause + unless Gitlab::Database::PostgresPartitionedTable.find_by_name_in_current_schema(partitioned_table).present? + raise "exiting backfill migration because the given destination table is not partitioned." + end + # rubocop: enable Style/GuardClause + end + end + end +end diff --git a/lib/gitlab/background_migration/backfill_upvotes_count_on_issues.rb b/lib/gitlab/background_migration/backfill_upvotes_count_on_issues.rb deleted file mode 100644 index 3bf6bf993dd..00000000000 --- a/lib/gitlab/background_migration/backfill_upvotes_count_on_issues.rb +++ /dev/null @@ -1,40 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Class that will populate the upvotes_count field - # for each issue - class BackfillUpvotesCountOnIssues - BATCH_SIZE = 1_000 - - def perform(start_id, stop_id) - (start_id..stop_id).step(BATCH_SIZE).each do |offset| - update_issue_upvotes_count(offset, offset + BATCH_SIZE) - end - end - - private - - def execute(sql) - @connection ||= ApplicationRecord.connection - @connection.execute(sql) - end - - def update_issue_upvotes_count(batch_start, batch_stop) - execute(<<~SQL) - UPDATE issues - SET upvotes_count = sub_q.count_all - FROM ( - SELECT COUNT(*) AS count_all, e.awardable_id AS issue_id - FROM award_emoji AS e - WHERE e.name = 
'thumbsup' AND - e.awardable_type = 'Issue' AND - e.awardable_id BETWEEN #{batch_start} AND #{batch_stop} - GROUP BY issue_id - ) AS sub_q - WHERE sub_q.issue_id = issues.id; - SQL - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_user_namespace.rb b/lib/gitlab/background_migration/backfill_user_namespace.rb deleted file mode 100644 index df6b1f083c3..00000000000 --- a/lib/gitlab/background_migration/backfill_user_namespace.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Backfills the `namespaces.type` column, replacing any - # instances of `NULL` with `User` - class BackfillUserNamespace - include Gitlab::Database::DynamicModelHelpers - - def perform(start_id, end_id, batch_table, batch_column, sub_batch_size, pause_ms) - parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id) - parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size, order_hint: :type) do |sub_batch| - batch_metrics.time_operation(:update_all) do - sub_batch.update_all(type: 'User') - end - pause_ms = 0 if pause_ms < 0 - sleep(pause_ms * 0.001) - end - end - - def batch_metrics - @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new - end - - private - - def connection - ApplicationRecord.connection - end - - def relation_scoped_to_range(source_table, source_key_column, start_id, stop_id) - define_batchable_model(source_table, connection: connection) - .where(source_key_column => start_id..stop_id) - .where(type: nil) - end - end - end -end diff --git a/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects.rb b/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects.rb deleted file mode 100644 index 4da120769a0..00000000000 --- a/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects.rb +++ /dev/null @@ -1,78 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module 
BackgroundMigration - # The migration is used to cleanup orphaned lfs_objects_projects in order to - # introduce valid foreign keys to this table - class CleanupOrphanedLfsObjectsProjects - # A model to access lfs_objects_projects table in migrations - class LfsObjectsProject < ActiveRecord::Base - self.table_name = 'lfs_objects_projects' - - include ::EachBatch - - belongs_to :lfs_object - belongs_to :project - end - - # A model to access lfs_objects table in migrations - class LfsObject < ActiveRecord::Base - self.table_name = 'lfs_objects' - end - - # A model to access projects table in migrations - class Project < ActiveRecord::Base - self.table_name = 'projects' - end - - SUB_BATCH_SIZE = 5000 - CLEAR_CACHE_DELAY = 1.minute - - def perform(start_id, end_id) - cleanup_lfs_objects_projects_without_lfs_object(start_id, end_id) - cleanup_lfs_objects_projects_without_project(start_id, end_id) - end - - private - - def cleanup_lfs_objects_projects_without_lfs_object(start_id, end_id) - each_record_without_association(start_id, end_id, :lfs_object, :lfs_objects) do |lfs_objects_projects_without_lfs_objects| - projects = Project.where(id: lfs_objects_projects_without_lfs_objects.select(:project_id)) - - if projects.present? 
- ProjectCacheWorker.bulk_perform_in_with_contexts( - CLEAR_CACHE_DELAY, - projects, - arguments_proc: ->(project) { [project.id, [], [:lfs_objects_size]] }, - context_proc: ->(project) { { project: project } } - ) - end - - lfs_objects_projects_without_lfs_objects.delete_all - end - end - - def cleanup_lfs_objects_projects_without_project(start_id, end_id) - each_record_without_association(start_id, end_id, :project, :projects) do |lfs_objects_projects_without_projects| - lfs_objects_projects_without_projects.delete_all - end - end - - def each_record_without_association(start_id, end_id, association, table_name) - batch = LfsObjectsProject.where(id: start_id..end_id) - - batch.each_batch(of: SUB_BATCH_SIZE) do |sub_batch| - first, last = sub_batch.pick(Arel.sql('min(lfs_objects_projects.id), max(lfs_objects_projects.id)')) - - lfs_objects_without_association = - LfsObjectsProject - .unscoped - .left_outer_joins(association) - .where(id: (first..last), table_name => { id: nil }) - - yield lfs_objects_without_association - end - end - end - end -end diff --git a/lib/gitlab/background_migration/delete_orphaned_deployments.rb b/lib/gitlab/background_migration/delete_orphaned_deployments.rb deleted file mode 100644 index 4a3a12ab53d..00000000000 --- a/lib/gitlab/background_migration/delete_orphaned_deployments.rb +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Background migration for deleting orphaned deployments. 
- class DeleteOrphanedDeployments - include Database::MigrationHelpers - - def perform(start_id, end_id) - orphaned_deployments - .where(id: start_id..end_id) - .delete_all - - mark_job_as_succeeded(start_id, end_id) - end - - def orphaned_deployments - define_batchable_model('deployments', connection: ApplicationRecord.connection) - .where('NOT EXISTS (SELECT 1 FROM environments WHERE deployments.environment_id = environments.id)') - end - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - self.class.name.demodulize, - arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images.rb b/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images.rb deleted file mode 100644 index dad5da875ab..00000000000 --- a/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images.rb +++ /dev/null @@ -1,41 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - BATCH_SIZE = 1000 - - # This background migration disables container expiration policies connected - # to a project that has no container repositories - class DisableExpirationPoliciesLinkedToNoContainerImages - # rubocop: disable Style/Documentation - class ContainerExpirationPolicy < ActiveRecord::Base - include EachBatch - - self.table_name = 'container_expiration_policies' - end - # rubocop: enable Style/Documentation - - def perform(from_id, to_id) - ContainerExpirationPolicy.where(enabled: true, project_id: from_id..to_id).each_batch(of: BATCH_SIZE) do |batch| - sql = <<-SQL - WITH batched_relation AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (#{batch.select(:project_id).limit(BATCH_SIZE).to_sql}) - UPDATE container_expiration_policies - SET enabled = FALSE - FROM batched_relation - WHERE container_expiration_policies.project_id = batched_relation.project_id 
- AND NOT EXISTS (SELECT 1 FROM "container_repositories" WHERE container_repositories.project_id = container_expiration_policies.project_id) - SQL - execute(sql) - end - end - - private - - def execute(sql) - ApplicationRecord - .connection - .execute(sql) - end - end - end -end diff --git a/lib/gitlab/background_migration/drop_invalid_remediations.rb b/lib/gitlab/background_migration/drop_invalid_remediations.rb deleted file mode 100644 index f0a0de586f5..00000000000 --- a/lib/gitlab/background_migration/drop_invalid_remediations.rb +++ /dev/null @@ -1,14 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # rubocop: disable Style/Documentation - class DropInvalidRemediations - def perform(start_id, stop_id) - end - end - # rubocop: enable Style/Documentation - end -end - -Gitlab::BackgroundMigration::DropInvalidRemediations.prepend_mod_with('Gitlab::BackgroundMigration::DropInvalidRemediations') diff --git a/lib/gitlab/background_migration/drop_invalid_security_findings.rb b/lib/gitlab/background_migration/drop_invalid_security_findings.rb deleted file mode 100644 index 000628e109c..00000000000 --- a/lib/gitlab/background_migration/drop_invalid_security_findings.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true -module Gitlab - module BackgroundMigration - # Drop rows from security_findings where the uuid is NULL - class DropInvalidSecurityFindings - # rubocop:disable Style/Documentation - class SecurityFinding < ActiveRecord::Base - include ::EachBatch - self.table_name = 'security_findings' - scope :no_uuid, -> { where(uuid: nil) } - end - # rubocop:enable Style/Documentation - - PAUSE_SECONDS = 0.1 - - def perform(start_id, end_id, sub_batch_size) - ranged_query = SecurityFinding - .where(id: start_id..end_id) - .no_uuid - - ranged_query.each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) - - # The query need to be reconstructed because .each_batch modifies 
the default scope - # See: https://gitlab.com/gitlab-org/gitlab/-/issues/330510 - SecurityFinding.unscoped - .where(id: first..last) - .no_uuid - .delete_all - - sleep PAUSE_SECONDS - end - - mark_job_as_succeeded(start_id, end_id, sub_batch_size) - end - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - self.class.name.demodulize, - arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/drop_invalid_vulnerabilities.rb b/lib/gitlab/background_migration/drop_invalid_vulnerabilities.rb deleted file mode 100644 index 293530f6536..00000000000 --- a/lib/gitlab/background_migration/drop_invalid_vulnerabilities.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true - -# rubocop: disable Style/Documentation -class Gitlab::BackgroundMigration::DropInvalidVulnerabilities - # rubocop: disable Gitlab/NamespacedClass - class Vulnerability < ActiveRecord::Base - self.table_name = "vulnerabilities" - has_many :findings, class_name: 'VulnerabilitiesFinding', inverse_of: :vulnerability - end - - class VulnerabilitiesFinding < ActiveRecord::Base - self.table_name = "vulnerability_occurrences" - belongs_to :vulnerability, class_name: 'Vulnerability', inverse_of: :findings, foreign_key: 'vulnerability_id' - end - # rubocop: enable Gitlab/NamespacedClass - - # rubocop: disable CodeReuse/ActiveRecord - def perform(start_id, end_id) - Vulnerability - .where(id: start_id..end_id) - .left_joins(:findings) - .where(vulnerability_occurrences: { vulnerability_id: nil }) - .delete_all - - mark_job_as_succeeded(start_id, end_id) - end - # rubocop: enable CodeReuse/ActiveRecord - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - 'DropInvalidVulnerabilities', - arguments - ) - end -end diff --git a/lib/gitlab/background_migration/encrypt_ci_trigger_token.rb b/lib/gitlab/background_migration/encrypt_ci_trigger_token.rb 
index b6e22e481fa..237c655a48a 100644 --- a/lib/gitlab/background_migration/encrypt_ci_trigger_token.rb +++ b/lib/gitlab/background_migration/encrypt_ci_trigger_token.rb @@ -18,8 +18,7 @@ module Gitlab mode: :per_attribute_iv, algorithm: 'aes-256-gcm', key: Settings.attr_encrypted_db_key_base_32, - encode: false, - encode_vi: false + encode: false before_save :copy_token_to_encrypted_token diff --git a/lib/gitlab/background_migration/encrypt_static_object_token.rb b/lib/gitlab/background_migration/encrypt_static_object_token.rb deleted file mode 100644 index 961dea028c9..00000000000 --- a/lib/gitlab/background_migration/encrypt_static_object_token.rb +++ /dev/null @@ -1,70 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Populates "static_object_token_encrypted" field with encrypted versions - # of values from "static_object_token" field - class EncryptStaticObjectToken - # rubocop:disable Style/Documentation - class User < ActiveRecord::Base - include ::EachBatch - self.table_name = 'users' - scope :with_static_object_token, -> { where.not(static_object_token: nil) } - scope :without_static_object_token_encrypted, -> { where(static_object_token_encrypted: nil) } - end - # rubocop:enable Style/Documentation - - BATCH_SIZE = 100 - - def perform(start_id, end_id) - ranged_query = User - .where(id: start_id..end_id) - .with_static_object_token - .without_static_object_token_encrypted - - ranged_query.each_batch(of: BATCH_SIZE) do |sub_batch| - first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) - - batch_query = User.unscoped - .where(id: first..last) - .with_static_object_token - .without_static_object_token_encrypted - - user_tokens = batch_query.pluck(:id, :static_object_token) - - user_encrypted_tokens = user_tokens.map do |(id, plaintext_token)| - next if plaintext_token.blank? 
- - [id, Gitlab::CryptoHelper.aes256_gcm_encrypt(plaintext_token)] - end - - encrypted_tokens_sql = user_encrypted_tokens.compact.map { |(id, token)| "(#{id}, '#{token}')" }.join(',') - - next unless user_encrypted_tokens.present? - - User.connection.execute(<<~SQL) - WITH cte(cte_id, cte_token) AS #{::Gitlab::Database::AsWithMaterialized.materialized_if_supported} ( - SELECT * - FROM (VALUES #{encrypted_tokens_sql}) AS t (id, token) - ) - UPDATE #{User.table_name} - SET static_object_token_encrypted = cte_token - FROM cte - WHERE cte_id = id - SQL - end - - mark_job_as_succeeded(start_id, end_id) - end - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - self.class.name.demodulize, - arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/extract_project_topics_into_separate_table.rb b/lib/gitlab/background_migration/extract_project_topics_into_separate_table.rb deleted file mode 100644 index 31b5b5cdb73..00000000000 --- a/lib/gitlab/background_migration/extract_project_topics_into_separate_table.rb +++ /dev/null @@ -1,63 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # The class to extract the project topics into a separate `topics` table - class ExtractProjectTopicsIntoSeparateTable - # Temporary AR table for tags - class Tag < ActiveRecord::Base - self.table_name = 'tags' - end - - # Temporary AR table for taggings - class Tagging < ActiveRecord::Base - self.table_name = 'taggings' - belongs_to :tag - end - - # Temporary AR table for topics - class Topic < ActiveRecord::Base - self.table_name = 'topics' - end - - # Temporary AR table for project topics - class ProjectTopic < ActiveRecord::Base - self.table_name = 'project_topics' - belongs_to :topic - end - - # Temporary AR table for projects - class Project < ActiveRecord::Base - self.table_name = 'projects' - end - - def perform(start_id, stop_id) - 
Tagging.includes(:tag).where(taggable_type: 'Project', id: start_id..stop_id).each do |tagging| - if Project.exists?(id: tagging.taggable_id) && tagging.tag - begin - topic = Topic.find_or_create_by(name: tagging.tag.name) - project_topic = ProjectTopic.find_or_create_by(project_id: tagging.taggable_id, topic: topic) - - tagging.delete if project_topic.persisted? - rescue StandardError => e - Gitlab::ErrorTracking.log_exception(e, tagging_id: tagging.id) - end - else - tagging.delete - end - end - - mark_job_as_succeeded(start_id, stop_id) - end - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - self.class.name.demodulize, - arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/fix_incorrect_max_seats_used.rb b/lib/gitlab/background_migration/fix_incorrect_max_seats_used.rb deleted file mode 100644 index 2c09b8c0b24..00000000000 --- a/lib/gitlab/background_migration/fix_incorrect_max_seats_used.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # rubocop: disable Style/Documentation - class FixIncorrectMaxSeatsUsed - def perform(batch = nil) - end - end - end -end - -Gitlab::BackgroundMigration::FixIncorrectMaxSeatsUsed.prepend_mod_with('Gitlab::BackgroundMigration::FixIncorrectMaxSeatsUsed') diff --git a/lib/gitlab/background_migration/fix_merge_request_diff_commit_users.rb b/lib/gitlab/background_migration/fix_merge_request_diff_commit_users.rb deleted file mode 100644 index 4df55a7b02a..00000000000 --- a/lib/gitlab/background_migration/fix_merge_request_diff_commit_users.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Background migration for fixing merge_request_diff_commit rows that don't - # have committer/author details due to - # https://gitlab.com/gitlab-org/gitlab/-/issues/344080. 
- class FixMergeRequestDiffCommitUsers - BATCH_SIZE = 100 - - def initialize - @commits = {} - @users = {} - end - - def perform(project_id) - # No-op, see https://gitlab.com/gitlab-org/gitlab/-/issues/344540 - end - end - end -end diff --git a/lib/gitlab/background_migration/merge_topics_with_same_name.rb b/lib/gitlab/background_migration/merge_topics_with_same_name.rb deleted file mode 100644 index 07231098a5f..00000000000 --- a/lib/gitlab/background_migration/merge_topics_with_same_name.rb +++ /dev/null @@ -1,76 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # The class to merge project topics with the same case insensitive name - class MergeTopicsWithSameName - # Temporary AR model for topics - class Topic < ActiveRecord::Base - self.table_name = 'topics' - end - - # Temporary AR model for project topic assignment - class ProjectTopic < ActiveRecord::Base - self.table_name = 'project_topics' - end - - def perform(topic_names) - topic_names.each do |topic_name| - topics = Topic.where('LOWER(name) = ?', topic_name) - .order(total_projects_count: :desc, non_private_projects_count: :desc, id: :asc) - .to_a - topic_to_keep = topics.shift - merge_topics(topic_to_keep, topics) if topics.any? - end - end - - private - - def merge_topics(topic_to_keep, topics_to_remove) - description = topic_to_keep.description - - topics_to_remove.each do |topic| - description ||= topic.description if topic.description.present? - process_avatar(topic_to_keep, topic) if topic.avatar.present? 
- - ProjectTopic.transaction do - ProjectTopic.where(topic_id: topic.id) - .where.not(project_id: ProjectTopic.where(topic_id: topic_to_keep).select(:project_id)) - .update_all(topic_id: topic_to_keep.id) - ProjectTopic.where(topic_id: topic.id).delete_all - end - end - - Topic.where(id: topics_to_remove).delete_all - - topic_to_keep.update( - description: description, - total_projects_count: total_projects_count(topic_to_keep.id), - non_private_projects_count: non_private_projects_count(topic_to_keep.id) - ) - end - - # We intentionally use application code here because we need to copy/remove avatar files - def process_avatar(topic_to_keep, topic_to_remove) - topic_to_remove = ::Projects::Topic.find(topic_to_remove.id) - topic_to_keep = ::Projects::Topic.find(topic_to_keep.id) - unless topic_to_keep.avatar.present? - topic_to_keep.avatar = topic_to_remove.avatar - topic_to_keep.save! - end - - topic_to_remove.remove_avatar! - topic_to_remove.save! - end - - def total_projects_count(topic_id) - ProjectTopic.where(topic_id: topic_id).count - end - - def non_private_projects_count(topic_id) - ProjectTopic.joins('INNER JOIN projects ON project_topics.project_id = projects.id') - .where(project_topics: { topic_id: topic_id }).where('projects.visibility_level in (10, 20)').count - end - end - end -end diff --git a/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings.rb b/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings.rb index 78a93b49c49..95e65d80a7a 100644 --- a/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings.rb +++ b/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings.rb @@ -41,14 +41,7 @@ module Gitlab build_evidence(finding, evidence) end.compact - begin - create_evidences(attrs) if attrs.present? - rescue StandardError => e - logger.error( - message: e.message, - class: self.class.name - ) - end + create_evidences(attrs) if attrs.present? 
end def build_evidence(finding, evidence) @@ -72,10 +65,6 @@ module Gitlab rescue JSON::ParserError nil end - - def logger - @logger ||= ::Gitlab::AppLogger - end end end end diff --git a/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings.rb b/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings.rb index 222ee4e524e..0e38be3b4c9 100644 --- a/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings.rb +++ b/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings.rb @@ -37,14 +37,16 @@ module Gitlab next unless list_of_attrs.present? - create_links(list_of_attrs) - rescue ActiveRecord::RecordNotUnique - rescue StandardError => e - logger.error( - message: e.message, - class: self.class.name, - model_id: finding.id - ) + begin + create_links(list_of_attrs) + rescue ActiveRecord::RecordNotUnique + rescue StandardError => e + logger.error( + message: e.message, + class: self.class.name, + model_id: finding.id + ) + end end end @@ -65,10 +67,11 @@ module Gitlab def extract_links(metadata) parsed_metadata = Gitlab::Json.parse(metadata) + parsed_links = Array.wrap(parsed_metadata['links']) - return [] unless parsed_metadata['links'] + return [] if parsed_links.blank? - parsed_metadata['links'].compact.uniq + parsed_links.select { |link| link.try(:[], 'url').present? }.uniq end def logger diff --git a/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users.rb b/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users.rb deleted file mode 100644 index 7d150b9cd83..00000000000 --- a/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users.rb +++ /dev/null @@ -1,296 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Migrates author and committer names and emails from - # merge_request_diff_commits to two columns that point to - # merge_request_diff_commit_users. 
- # - # rubocop: disable Metrics/ClassLength - class MigrateMergeRequestDiffCommitUsers - # The number of user rows in merge_request_diff_commit_users to get in a - # single query. - USER_ROWS_PER_QUERY = 1_000 - - # The number of rows in merge_request_diff_commits to get in a single - # query. - COMMIT_ROWS_PER_QUERY = 1_000 - - # The number of rows in merge_request_diff_commits to update in a single - # query. - # - # Tests in staging revealed that increasing the number of updates per - # query translates to a longer total runtime for a migration. For example, - # given the same range of rows to migrate, 1000 updates per query required - # a total of roughly 15 seconds. On the other hand, 5000 updates per query - # required a total of roughly 25 seconds. For this reason, we use a value - # of 1000 rows per update. - UPDATES_PER_QUERY = 1_000 - - # rubocop: disable Style/Documentation - class MergeRequestDiffCommit < ActiveRecord::Base - include FromUnion - extend ::SuppressCompositePrimaryKeyWarning - - self.table_name = 'merge_request_diff_commits' - - # Yields each row to migrate in the given range. - # - # This method uses keyset pagination to ensure we don't retrieve - # potentially tens of thousands (or even hundreds of thousands) of rows - # in a single query. Such queries could time out, or increase the amount - # of memory needed to process the data. - # - # We can't use `EachBatch` and similar approaches, as - # merge_request_diff_commits doesn't have a single monotonically - # increasing primary key. 
- def self.each_row_to_migrate(start_id, stop_id, &block) - order = Pagination::Keyset::Order.build( - %w[merge_request_diff_id relative_order].map do |col| - Pagination::Keyset::ColumnOrderDefinition.new( - attribute_name: col, - order_expression: self.arel_table[col.to_sym].asc, - nullable: :not_nullable, - distinct: false - ) - end - ) - - scope = MergeRequestDiffCommit - .where(merge_request_diff_id: start_id...stop_id) - .order(order) - - Pagination::Keyset::Iterator - .new(scope: scope, use_union_optimization: true) - .each_batch(of: COMMIT_ROWS_PER_QUERY) { |rows| rows.each(&block) } - end - end - # rubocop: enable Style/Documentation - - # rubocop: disable Style/Documentation - class MergeRequestDiffCommitUser < ActiveRecord::Base - self.table_name = 'merge_request_diff_commit_users' - - def self.union(queries) - from("(#{queries.join("\nUNION ALL\n")}) #{table_name}") - end - end - # rubocop: enable Style/Documentation - - def perform(start_id, stop_id) - return if already_processed?(start_id, stop_id) - - # This Hash maps user names + emails to their corresponding rows in - # merge_request_diff_commit_users. - user_mapping = {} - - user_details, diff_rows_to_update = get_data_to_update(start_id, stop_id) - - get_user_rows_in_batches(user_details, user_mapping) - create_missing_users(user_details, user_mapping) - update_commit_rows(diff_rows_to_update, user_mapping) - - Database::BackgroundMigrationJob.mark_all_as_succeeded( - 'MigrateMergeRequestDiffCommitUsers', - [start_id, stop_id] - ) - end - - def already_processed?(start_id, stop_id) - Database::BackgroundMigrationJob - .for_migration_execution('MigrateMergeRequestDiffCommitUsers', [start_id, stop_id]) - .succeeded - .any? - end - - # Returns the data we'll use to determine what merge_request_diff_commits - # rows to update, and what data to use for populating their - # commit_author_id and committer_id columns. 
- def get_data_to_update(start_id, stop_id) - # This Set is used to retrieve users that already exist in - # merge_request_diff_commit_users. - users = Set.new - - # This Hash maps the primary key of every row in - # merge_request_diff_commits to the (trimmed) author and committer - # details to use for updating the row. - to_update = {} - - MergeRequestDiffCommit.each_row_to_migrate(start_id, stop_id) do |row| - author = [prepare(row.author_name), prepare(row.author_email)] - committer = [prepare(row.committer_name), prepare(row.committer_email)] - - to_update[[row.merge_request_diff_id, row.relative_order]] = - [author, committer] - - users << author if author[0] || author[1] - users << committer if committer[0] || committer[1] - end - - [users, to_update] - end - - # Gets any existing rows in merge_request_diff_commit_users in batches. - # - # This method may end up having to retrieve lots of rows. To reduce the - # overhead, we batch queries into a UNION query. We limit the number of - # queries per UNION so we don't end up sending a single query containing - # too many SELECT statements. - def get_user_rows_in_batches(users, user_mapping) - users.each_slice(USER_ROWS_PER_QUERY) do |pairs| - queries = pairs.map do |(name, email)| - MergeRequestDiffCommitUser.where(name: name, email: email).to_sql - end - - MergeRequestDiffCommitUser.union(queries).each do |row| - user_mapping[[row.name.to_s, row.email.to_s]] = row - end - end - end - - # Creates any users for which no row exists in - # merge_request_diff_commit_users. - # - # Not all users queried may exist yet, so we need to create any missing - # ones; making sure we handle concurrent creations of the same user - def create_missing_users(users, mapping) - create = [] - - users.each do |(name, email)| - create << { name: name, email: email } unless mapping[[name, email]] - end - - return if create.empty? 
- - MergeRequestDiffCommitUser - .insert_all(create, returning: %w[id name email]) - .each do |row| - mapping[[row['name'], row['email']]] = MergeRequestDiffCommitUser - .new(id: row['id'], name: row['name'], email: row['email']) - end - - # It's possible for (name, email) pairs to be inserted concurrently, - # resulting in the above insert not returning anything. Here we get any - # remaining users that were created concurrently. - get_user_rows_in_batches( - users.reject { |pair| mapping.key?(pair) }, - mapping - ) - end - - # Updates rows in merge_request_diff_commits with their new - # commit_author_id and committer_id values. - def update_commit_rows(to_update, user_mapping) - to_update.each_slice(UPDATES_PER_QUERY) do |slice| - updates = {} - - slice.each do |(diff_id, order), (author, committer)| - author_id = user_mapping[author]&.id - committer_id = user_mapping[committer]&.id - - updates[[diff_id, order]] = [author_id, committer_id] - end - - bulk_update_commit_rows(updates) - end - end - - # Bulk updates rows in the merge_request_diff_commits table with their new - # author and/or committer ID values. - # - # Updates are batched together to reduce the overhead of having to produce - # a single UPDATE for every row, as we may end up having to update - # thousands of rows at once. - # - # The query produced by this method is along the lines of the following: - # - # UPDATE merge_request_diff_commits - # SET commit_author_id = - # CASE - # WHEN (merge_request_diff_id, relative_order) = (x, y) THEN X - # WHEN ... - # END, - # committer_id = - # CASE - # WHEN (merge_request_diff_id, relative_order) = (x, y) THEN Y - # WHEN ... - # END - # WHERE (merge_request_diff_id, relative_order) IN ( (x, y), ... 
) - # - # The `mapping` argument is a Hash in the following format: - # - # { [merge_request_diff_id, relative_order] => [author_id, committer_id] } - # - # rubocop: disable Metrics/AbcSize - def bulk_update_commit_rows(mapping) - author_case = Arel::Nodes::Case.new - committer_case = Arel::Nodes::Case.new - primary_values = [] - - mapping.each do |diff_id_and_order, (author_id, committer_id)| - primary_value = Arel::Nodes::Grouping.new(diff_id_and_order) - - primary_values << primary_value - - if author_id - author_case.when(primary_key.eq(primary_value)).then(author_id) - end - - if committer_id - committer_case.when(primary_key.eq(primary_value)).then(committer_id) - end - end - - if author_case.conditions.empty? && committer_case.conditions.empty? - return - end - - fields = [] - - # Statements such as `SET x = CASE END` are not valid SQL statements, so - # we omit setting an ID field if there are no values to populate it - # with. - if author_case.conditions.any? - fields << [arel_table[:commit_author_id], author_case] - end - - if committer_case.conditions.any? - fields << [arel_table[:committer_id], committer_case] - end - - query = Arel::UpdateManager.new - .table(arel_table) - .where(primary_key.in(primary_values)) - .set(fields) - .to_sql - - MergeRequestDiffCommit.connection.execute(query) - end - # rubocop: enable Metrics/AbcSize - - def primary_key - Arel::Nodes::Grouping.new( - [arel_table[:merge_request_diff_id], arel_table[:relative_order]] - ) - end - - def arel_table - MergeRequestDiffCommit.arel_table - end - - # Prepares a value to be inserted into a column in the table - # `merge_request_diff_commit_users`. Values in this table are limited to - # 512 characters. - # - # We treat empty strings as NULL values, as there's no point in (for - # example) storing a row where both the name and Email are an empty - # string. 
In addition, if we treated them differently we could end up with - # two rows: one where field X is NULL, and one where field X is an empty - # string. This is redundant, so we avoid storing such data. - def prepare(value) - value.present? ? value[0..511] : nil - end - end - # rubocop: enable Metrics/ClassLength - end -end diff --git a/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics.rb b/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics.rb deleted file mode 100644 index 68bbd3cfebb..00000000000 --- a/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # The class to migrate the context of project taggings from `tags` to `topics` - class MigrateProjectTaggingsContextFromTagsToTopics - # Temporary AR table for taggings - class Tagging < ActiveRecord::Base - include EachBatch - - self.table_name = 'taggings' - end - - def perform(start_id, stop_id) - Tagging.where(taggable_type: 'Project', context: 'tags', id: start_id..stop_id).each_batch(of: 500) do |relation| - relation.update_all(context: 'topics') - end - end - end - end -end diff --git a/lib/gitlab/background_migration/migrate_shared_vulnerability_identifiers.rb b/lib/gitlab/background_migration/migrate_shared_vulnerability_identifiers.rb new file mode 100644 index 00000000000..6a9f1692b72 --- /dev/null +++ b/lib/gitlab/background_migration/migrate_shared_vulnerability_identifiers.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # rubocop: disable Style/Documentation + class MigrateSharedVulnerabilityIdentifiers < BatchedMigrationJob + # rubocop: enable Style/Documentation + + feature_category :vulnerability_management + + def perform; end + end + end +end + +# rubocop: disable Layout/LineLength 
+Gitlab::BackgroundMigration::MigrateSharedVulnerabilityIdentifiers.prepend_mod_with("Gitlab::BackgroundMigration::MigrateSharedVulnerabilityIdentifiers") +# rubocop: enable Layout/LineLength diff --git a/lib/gitlab/background_migration/migrate_u2f_webauthn.rb b/lib/gitlab/background_migration/migrate_u2f_webauthn.rb deleted file mode 100644 index 83aa36a11e6..00000000000 --- a/lib/gitlab/background_migration/migrate_u2f_webauthn.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true -# rubocop:disable Style/Documentation - -module Gitlab - module BackgroundMigration - class MigrateU2fWebauthn - class U2fRegistration < ActiveRecord::Base - self.table_name = 'u2f_registrations' - end - - class WebauthnRegistration < ActiveRecord::Base - self.table_name = 'webauthn_registrations' - end - - def perform(start_id, end_id) - old_registrations = U2fRegistration.where(id: start_id..end_id) - old_registrations.each_slice(100) do |slice| - values = slice.map do |u2f_registration| - converter = Gitlab::Auth::U2fWebauthnConverter.new(u2f_registration) - converter.convert - end - - WebauthnRegistration.insert_all(values, unique_by: :credential_xid, returning: false) - end - end - end - end -end diff --git a/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature.rb b/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature.rb deleted file mode 100644 index 06422ed282f..00000000000 --- a/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # This migration moves projects.container_registry_enabled values to - # project_features.container_registry_access_level for the projects within - # the given range of ids. 
- class MoveContainerRegistryEnabledToProjectFeature - MAX_BATCH_SIZE = 300 - - ENABLED = 20 - DISABLED = 0 - - def perform(from_id, to_id) - (from_id..to_id).each_slice(MAX_BATCH_SIZE) do |batch| - process_batch(batch.first, batch.last) - end - - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded('MoveContainerRegistryEnabledToProjectFeature', [from_id, to_id]) - end - - private - - def process_batch(from_id, to_id) - ApplicationRecord.connection.execute(update_sql(from_id, to_id)) - - logger.info(message: "#{self.class}: Copied container_registry_enabled values for projects with IDs between #{from_id}..#{to_id}") - end - - # For projects that have a project_feature: - # Set project_features.container_registry_access_level to ENABLED (20) or DISABLED (0) - # depending if container_registry_enabled is true or false. - def update_sql(from_id, to_id) - <<~SQL - UPDATE project_features - SET container_registry_access_level = (CASE p.container_registry_enabled - WHEN true THEN #{ENABLED} - WHEN false THEN #{DISABLED} - ELSE #{DISABLED} - END) - FROM projects p - WHERE project_id = p.id AND - project_id BETWEEN #{from_id} AND #{to_id} - SQL - end - - def logger - @logger ||= Gitlab::BackgroundMigration::Logger.build - end - end - end -end diff --git a/lib/gitlab/background_migration/populate_namespace_statistics.rb b/lib/gitlab/background_migration/populate_namespace_statistics.rb deleted file mode 100644 index 97927ef48c2..00000000000 --- a/lib/gitlab/background_migration/populate_namespace_statistics.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # This class creates/updates those namespace statistics - # that haven't been created nor initialized. - # It also updates the related namespace statistics - class PopulateNamespaceStatistics - def perform(group_ids, statistics) - # Updating group statistics might involve calling Gitaly. 
- # For example, when calculating `wiki_size`, we will need - # to perform the request to check if the repo exists and - # also the repository size. - # - # The `allow_n_plus_1_calls` method is only intended for - # dev and test. It won't be raised in prod. - ::Gitlab::GitalyClient.allow_n_plus_1_calls do - relation(group_ids).each do |group| - upsert_namespace_statistics(group, statistics) - end - end - end - - private - - def upsert_namespace_statistics(group, statistics) - response = ::Groups::UpdateStatisticsService.new(group, statistics: statistics).execute - - error_message("#{response.message} group: #{group.id}") if response.error? - end - - def logger - @logger ||= ::Gitlab::BackgroundMigration::Logger.build - end - - def error_message(message) - logger.error(message: "Namespace Statistics Migration: #{message}") - end - - def relation(group_ids) - Group.includes(:namespace_statistics).where(id: group_ids) - end - end - end -end - -Gitlab::BackgroundMigration::PopulateNamespaceStatistics.prepend_mod_with('Gitlab::BackgroundMigration::PopulateNamespaceStatistics') diff --git a/lib/gitlab/background_migration/populate_test_reports_issue_id.rb b/lib/gitlab/background_migration/populate_test_reports_issue_id.rb deleted file mode 100644 index 301efd0c943..00000000000 --- a/lib/gitlab/background_migration/populate_test_reports_issue_id.rb +++ /dev/null @@ -1,14 +0,0 @@ -# frozen_string_literal: true -# rubocop: disable Style/Documentation - -module Gitlab - module BackgroundMigration - class PopulateTestReportsIssueId - def perform(start_id, stop_id) - # NO OP - end - end - end -end - -Gitlab::BackgroundMigration::PopulateTestReportsIssueId.prepend_mod diff --git a/lib/gitlab/background_migration/populate_topics_non_private_projects_count.rb b/lib/gitlab/background_migration/populate_topics_non_private_projects_count.rb deleted file mode 100644 index 1f2b55004e4..00000000000 --- a/lib/gitlab/background_migration/populate_topics_non_private_projects_count.rb +++ 
/dev/null @@ -1,36 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # The class to populates the non private projects counter of topics - class PopulateTopicsNonPrivateProjectsCount - SUB_BATCH_SIZE = 100 - - # Temporary AR model for topics - class Topic < ActiveRecord::Base - include EachBatch - - self.table_name = 'topics' - end - - def perform(start_id, stop_id) - Topic.where(id: start_id..stop_id).each_batch(of: SUB_BATCH_SIZE) do |batch| - ApplicationRecord.connection.execute(<<~SQL) - WITH batched_relation AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (#{batch.select(:id).limit(SUB_BATCH_SIZE).to_sql}) - UPDATE topics - SET non_private_projects_count = ( - SELECT COUNT(*) - FROM project_topics - INNER JOIN projects - ON project_topics.project_id = projects.id - WHERE project_topics.topic_id = batched_relation.id - AND projects.visibility_level > 0 - ) - FROM batched_relation - WHERE topics.id = batched_relation.id - SQL - end - end - end - end -end diff --git a/lib/gitlab/background_migration/populate_topics_total_projects_count_cache.rb b/lib/gitlab/background_migration/populate_topics_total_projects_count_cache.rb deleted file mode 100644 index 2495cb51364..00000000000 --- a/lib/gitlab/background_migration/populate_topics_total_projects_count_cache.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - SUB_BATCH_SIZE = 1_000 - - # The class to populates the total projects counter cache of topics - class PopulateTopicsTotalProjectsCountCache - # Temporary AR model for topics - class Topic < ActiveRecord::Base - include EachBatch - - self.table_name = 'topics' - end - - def perform(start_id, stop_id) - Topic.where(id: start_id..stop_id).each_batch(of: SUB_BATCH_SIZE) do |batch| - ApplicationRecord.connection.execute(<<~SQL) - WITH batched_relation AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} 
(#{batch.select(:id).limit(SUB_BATCH_SIZE).to_sql}) - UPDATE topics - SET total_projects_count = (SELECT COUNT(*) FROM project_topics WHERE topic_id = batched_relation.id) - FROM batched_relation - WHERE topics.id = batched_relation.id - SQL - end - end - end - end -end diff --git a/lib/gitlab/background_migration/populate_uuids_for_security_findings.rb b/lib/gitlab/background_migration/populate_uuids_for_security_findings.rb deleted file mode 100644 index 175966b940d..00000000000 --- a/lib/gitlab/background_migration/populate_uuids_for_security_findings.rb +++ /dev/null @@ -1,18 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # rubocop:disable Style/Documentation - class PopulateUuidsForSecurityFindings - NOP_RELATION = Class.new { def each_batch(*); end } - - def self.security_findings - NOP_RELATION.new - end - - def perform(*_scan_ids); end - end - end -end - -Gitlab::BackgroundMigration::PopulateUuidsForSecurityFindings.prepend_mod_with('Gitlab::BackgroundMigration::PopulateUuidsForSecurityFindings') diff --git a/lib/gitlab/background_migration/populate_vulnerability_reads.rb b/lib/gitlab/background_migration/populate_vulnerability_reads.rb deleted file mode 100644 index 656c62d9ee5..00000000000 --- a/lib/gitlab/background_migration/populate_vulnerability_reads.rb +++ /dev/null @@ -1,84 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # rubocop:disable Style/Documentation - class PopulateVulnerabilityReads - include Gitlab::Database::DynamicModelHelpers - - PAUSE_SECONDS = 0.1 - - def perform(start_id, end_id, sub_batch_size) - vulnerability_model.where(id: start_id..end_id).each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) - connection.execute(insert_query(first, last)) - - sleep PAUSE_SECONDS - end - - mark_job_as_succeeded(start_id, end_id, sub_batch_size) - end - - private - - def vulnerability_model - 
define_batchable_model('vulnerabilities', connection: connection) - end - - def connection - ApplicationRecord.connection - end - - def insert_query(start_id, end_id) - <<~SQL - INSERT INTO vulnerability_reads ( - vulnerability_id, - project_id, - scanner_id, - report_type, - severity, - state, - has_issues, - resolved_on_default_branch, - uuid, - location_image - ) - SELECT - vulnerabilities.id, - vulnerabilities.project_id, - vulnerability_scanners.id, - vulnerabilities.report_type, - vulnerabilities.severity, - vulnerabilities.state, - CASE - WHEN - vulnerability_issue_links.vulnerability_id IS NOT NULL - THEN - true - ELSE - false - END - has_issues, - vulnerabilities.resolved_on_default_branch, - vulnerability_occurrences.uuid::uuid, - vulnerability_occurrences.location ->> 'image' - FROM - vulnerabilities - INNER JOIN vulnerability_occurrences ON vulnerability_occurrences.vulnerability_id = vulnerabilities.id - INNER JOIN vulnerability_scanners ON vulnerability_scanners.id = vulnerability_occurrences.scanner_id - LEFT JOIN vulnerability_issue_links ON vulnerability_issue_links.vulnerability_id = vulnerabilities.id - WHERE vulnerabilities.id BETWEEN #{start_id} AND #{end_id} - ON CONFLICT(vulnerability_id) DO NOTHING; - SQL - end - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - self.class.name.demodulize, - arguments - ) - end - end - # rubocop:enable Style/Documentation - end -end diff --git a/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid.rb b/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid.rb deleted file mode 100644 index 9a42d035285..00000000000 --- a/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid.rb +++ /dev/null @@ -1,218 +0,0 @@ -# frozen_string_literal: true - -# rubocop: disable Style/Documentation -class Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid # rubocop:disable 
Metrics/ClassLength - # rubocop: disable Gitlab/NamespacedClass - class VulnerabilitiesIdentifier < ActiveRecord::Base - self.table_name = "vulnerability_identifiers" - has_many :primary_findings, class_name: 'VulnerabilitiesFinding', inverse_of: :primary_identifier, foreign_key: 'primary_identifier_id' - end - - class VulnerabilitiesFinding < ActiveRecord::Base - include EachBatch - include ShaAttribute - - self.table_name = "vulnerability_occurrences" - - has_many :signatures, foreign_key: 'finding_id', class_name: 'VulnerabilityFindingSignature', inverse_of: :finding - belongs_to :primary_identifier, class_name: 'VulnerabilitiesIdentifier', inverse_of: :primary_findings, foreign_key: 'primary_identifier_id' - - REPORT_TYPES = { - sast: 0, - dependency_scanning: 1, - container_scanning: 2, - dast: 3, - secret_detection: 4, - coverage_fuzzing: 5, - api_fuzzing: 6, - cluster_image_scanning: 7, - generic: 99 - }.with_indifferent_access.freeze - enum report_type: REPORT_TYPES - - sha_attribute :fingerprint - sha_attribute :location_fingerprint - end - - class VulnerabilityFindingSignature < ActiveRecord::Base - include ShaAttribute - - self.table_name = 'vulnerability_finding_signatures' - belongs_to :finding, foreign_key: 'finding_id', inverse_of: :signatures, class_name: 'VulnerabilitiesFinding' - - sha_attribute :signature_sha - end - - class VulnerabilitiesFindingPipeline < ActiveRecord::Base - include EachBatch - self.table_name = "vulnerability_occurrence_pipelines" - end - - class Vulnerability < ActiveRecord::Base - include EachBatch - self.table_name = "vulnerabilities" - end - - class CalculateFindingUUID - FINDING_NAMESPACES_IDS = { - development: "a143e9e2-41b3-47bc-9a19-081d089229f4", - test: "a143e9e2-41b3-47bc-9a19-081d089229f4", - staging: "a6930898-a1b2-4365-ab18-12aa474d9b26", - production: "58dc0f06-936c-43b3-93bb-71693f1b6570" - }.freeze - - NAMESPACE_REGEX = /(\h{8})-(\h{4})-(\h{4})-(\h{4})-(\h{4})(\h{8})/.freeze - PACK_PATTERN = "NnnnnN" - - def 
self.call(value) - Digest::UUID.uuid_v5(namespace_id, value) - end - - def self.namespace_id - namespace_uuid = FINDING_NAMESPACES_IDS.fetch(Rails.env.to_sym) - # Digest::UUID is broken when using an UUID in namespace_id - # https://github.com/rails/rails/issues/37681#issue-520718028 - namespace_uuid.scan(NAMESPACE_REGEX).flatten.map { |s| s.to_i(16) }.pack(PACK_PATTERN) - end - end - # rubocop: enable Gitlab/NamespacedClass - - # rubocop: disable Metrics/AbcSize,Metrics/MethodLength,Metrics/BlockLength - def perform(start_id, end_id) - log_info('Migration started', start_id: start_id, end_id: end_id) - - VulnerabilitiesFinding - .joins(:primary_identifier) - .includes(:signatures) - .select(:id, :report_type, :primary_identifier_id, :fingerprint, :location_fingerprint, :project_id, :created_at, :vulnerability_id, :uuid) - .where(id: start_id..end_id) - .each_batch(of: 50) do |relation| - duplicates = find_duplicates(relation) - remove_findings(ids: duplicates) if duplicates.present? - - to_update = relation.reject { |finding| duplicates.include?(finding.id) } - - begin - known_uuids = Set.new - to_be_deleted = [] - - mappings = to_update.each_with_object({}) do |finding, hash| - uuid = calculate_uuid_v5_for_finding(finding) - - if known_uuids.add?(uuid) - hash[finding] = { uuid: uuid } - else - to_be_deleted << finding.id - end - end - - # It is technically still possible to have duplicate uuids - # if the data integrity is broken somehow and the primary identifiers of - # the findings are pointing to different projects with the same fingerprint values. - if to_be_deleted.present? - log_info('Conflicting UUIDs found within the batch', finding_ids: to_be_deleted) - - remove_findings(ids: to_be_deleted) - end - - ::Gitlab::Database::BulkUpdate.execute(%i[uuid], mappings) if mappings.present? 
- - log_info('Recalculation is done', finding_ids: mappings.keys.pluck(:id)) - rescue ActiveRecord::RecordNotUnique => error - log_info('RecordNotUnique error received') - - match_data = /\(uuid\)=\((?<uuid>\S{36})\)/.match(error.message) - - # This exception returns the **correct** UUIDv5 which probably comes from a later record - # and it's the one we can drop in the easiest way before retrying the UPDATE query - if match_data - uuid = match_data[:uuid] - log_info('Conflicting UUID found', uuid: uuid) - - id = VulnerabilitiesFinding.find_by(uuid: uuid)&.id - remove_findings(ids: id) if id - retry - else - log_error('Couldnt find conflicting uuid') - - Gitlab::ErrorTracking.track_and_raise_exception(error) - end - end - end - - mark_job_as_succeeded(start_id, end_id) - rescue StandardError => error - log_error('An exception happened') - - Gitlab::ErrorTracking.track_and_raise_exception(error) - end - # rubocop: disable Metrics/AbcSize,Metrics/MethodLength,Metrics/BlockLength - - private - - def find_duplicates(relation) - to_exclude = [] - relation.flat_map do |record| - # Assuming we're scanning id 31 and the duplicate is id 40 - # first we'd process 31 and add 40 to the list of ids to remove - # then we would process record 40 and add 31 to the list of removals - # so we would drop both records - to_exclude << record.id - - VulnerabilitiesFinding.where( - report_type: record.report_type, - location_fingerprint: record.location_fingerprint, - primary_identifier_id: record.primary_identifier_id, - project_id: record.project_id - ).where.not(id: to_exclude).pluck(:id) - end - end - - def remove_findings(ids:) - ids = Array(ids) - log_info('Removing Findings and associated records', ids: ids) - - vulnerability_ids = VulnerabilitiesFinding.where(id: ids).pluck(:vulnerability_id).uniq.compact - - VulnerabilitiesFindingPipeline.where(occurrence_id: ids).each_batch { |batch| batch.delete_all } - Vulnerability.where(id: vulnerability_ids).each_batch { |batch| 
batch.delete_all } - VulnerabilitiesFinding.where(id: ids).delete_all - end - - def calculate_uuid_v5_for_finding(vulnerability_finding) - return unless vulnerability_finding - - signatures = vulnerability_finding.signatures.sort_by { |signature| signature.algorithm_type_before_type_cast } - location_fingerprint = signatures.last&.signature_sha || vulnerability_finding.location_fingerprint - - uuid_v5_name_components = { - report_type: vulnerability_finding.report_type, - primary_identifier_fingerprint: vulnerability_finding.fingerprint, - location_fingerprint: location_fingerprint, - project_id: vulnerability_finding.project_id - } - - name = uuid_v5_name_components.values.join('-') - - CalculateFindingUUID.call(name) - end - - def log_info(message, **extra) - logger.info(migrator: 'RecalculateVulnerabilitiesOccurrencesUuid', message: message, **extra) - end - - def log_error(message, **extra) - logger.error(migrator: 'RecalculateVulnerabilitiesOccurrencesUuid', message: message, **extra) - end - - def logger - @logger ||= Gitlab::BackgroundMigration::Logger.build - end - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - 'RecalculateVulnerabilitiesOccurrencesUuid', - arguments - ) - end -end diff --git a/lib/gitlab/background_migration/recalculate_vulnerability_finding_signatures_for_findings.rb b/lib/gitlab/background_migration/recalculate_vulnerability_finding_signatures_for_findings.rb deleted file mode 100644 index 20200a1d508..00000000000 --- a/lib/gitlab/background_migration/recalculate_vulnerability_finding_signatures_for_findings.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # rubocop: disable Style/Documentation - class RecalculateVulnerabilityFindingSignaturesForFindings - def perform(start_id, stop_id) - end - end - end -end - -Gitlab::BackgroundMigration::RecalculateVulnerabilityFindingSignaturesForFindings.prepend_mod diff --git 
a/lib/gitlab/background_migration/remove_all_trace_expiration_dates.rb b/lib/gitlab/background_migration/remove_all_trace_expiration_dates.rb deleted file mode 100644 index d47aa76f24b..00000000000 --- a/lib/gitlab/background_migration/remove_all_trace_expiration_dates.rb +++ /dev/null @@ -1,53 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Removing expire_at timestamps that shouldn't have - # been written to traces on gitlab.com. - class RemoveAllTraceExpirationDates - include Gitlab::Database::MigrationHelpers - - BATCH_SIZE = 1_000 - - # Stubbed class to connect to the CI database - # connects_to has to be called in abstract classes. - class MultiDbAdaptableClass < ActiveRecord::Base - self.abstract_class = true - - if Gitlab::Database.has_config?(:ci) - connects_to database: { writing: :ci, reading: :ci } - end - end - - # Stubbed class to access the ci_job_artifacts table - class JobArtifact < MultiDbAdaptableClass - include EachBatch - - self.table_name = 'ci_job_artifacts' - - TARGET_TIMESTAMPS = [ - Date.new(2021, 04, 22).midnight.utc, - Date.new(2021, 05, 22).midnight.utc, - Date.new(2021, 06, 22).midnight.utc, - Date.new(2022, 01, 22).midnight.utc, - Date.new(2022, 02, 22).midnight.utc, - Date.new(2022, 03, 22).midnight.utc, - Date.new(2022, 04, 22).midnight.utc - ].freeze - - scope :traces, -> { where(file_type: 3) } - scope :between, -> (start_id, end_id) { where(id: start_id..end_id) } - scope :in_targeted_timestamps, -> { where(expire_at: TARGET_TIMESTAMPS) } - end - - def perform(start_id, end_id) - return unless Gitlab.com? 
- - JobArtifact.traces - .between(start_id, end_id) - .in_targeted_timestamps - .each_batch(of: BATCH_SIZE) { |batch| batch.update_all(expire_at: nil) } - end - end - end -end diff --git a/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb b/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb deleted file mode 100644 index 15799659b55..00000000000 --- a/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings.rb +++ /dev/null @@ -1,64 +0,0 @@ -# frozen_string_literal: true - -# rubocop: disable Style/Documentation -class Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings - DELETE_BATCH_SIZE = 50 - - # rubocop:disable Gitlab/NamespacedClass - class VulnerabilitiesFinding < ActiveRecord::Base - self.table_name = "vulnerability_occurrences" - end - # rubocop:enable Gitlab/NamespacedClass - - # rubocop:disable Gitlab/NamespacedClass - class Vulnerability < ActiveRecord::Base - self.table_name = "vulnerabilities" - end - # rubocop:enable Gitlab/NamespacedClass - - def perform(start_id, end_id) - batch = VulnerabilitiesFinding.where(id: start_id..end_id) - - cte = Gitlab::SQL::CTE.new(:batch, batch.select(:report_type, :location_fingerprint, :primary_identifier_id, :project_id)) - - query = VulnerabilitiesFinding - .select('batch.report_type', 'batch.location_fingerprint', 'batch.primary_identifier_id', 'batch.project_id', 'array_agg(id) as ids') - .distinct - .with(cte.to_arel) - .from(cte.alias_to(Arel.sql('batch'))) - .joins( - %( - INNER JOIN - vulnerability_occurrences ON - vulnerability_occurrences.report_type = batch.report_type AND - vulnerability_occurrences.location_fingerprint = batch.location_fingerprint AND - vulnerability_occurrences.primary_identifier_id = batch.primary_identifier_id AND - vulnerability_occurrences.project_id = batch.project_id - )).group('batch.report_type', 'batch.location_fingerprint', 'batch.primary_identifier_id', 'batch.project_id') - 
.having('COUNT(*) > 1') - - ids_to_delete = [] - - query.to_a.each do |record| - # We want to keep the latest finding since it might have recent metadata - duplicate_ids = record.ids.uniq.sort - duplicate_ids.pop - ids_to_delete.concat(duplicate_ids) - - if ids_to_delete.size == DELETE_BATCH_SIZE - delete_findings_and_vulnerabilities(ids_to_delete) - ids_to_delete.clear - end - end - - delete_findings_and_vulnerabilities(ids_to_delete) if ids_to_delete.any? - end - - private - - def delete_findings_and_vulnerabilities(ids) - vulnerability_ids = VulnerabilitiesFinding.where(id: ids).pluck(:vulnerability_id).compact - VulnerabilitiesFinding.where(id: ids).delete_all - Vulnerability.where(id: vulnerability_ids).delete_all - end -end diff --git a/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups.rb b/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups.rb new file mode 100644 index 00000000000..879e52c96bf --- /dev/null +++ b/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # A job to remove `project_group_links` records whose associated group + # does not exist in `namespaces` table anymore. + class RemoveProjectGroupLinkWithMissingGroups < Gitlab::BackgroundMigration::BatchedMigrationJob + scope_to ->(relation) { relation } + operation_name :delete_all + feature_category :subgroups + + def perform + each_sub_batch do |sub_batch| + records = sub_batch.joins( + "LEFT OUTER JOIN namespaces ON namespaces.id = project_group_links.group_id AND namespaces.type = 'Group'" + ).where(namespaces: { id: nil }) + + ids = records.map(&:id) + + next if ids.empty? 
+ + Gitlab::AppLogger.info({ message: 'Removing project group link with non-existent groups', + deleted_count: ids.count, + ids: ids }) + + records.delete_all + end + end + end + end +end diff --git a/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users.rb b/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users.rb deleted file mode 100644 index 43a7032e682..00000000000 --- a/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users.rb +++ /dev/null @@ -1,33 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # A background migration that finished any pending - # MigrateMergeRequestDiffCommitUsers jobs, and schedules new jobs itself. - # - # This migration exists so we can bypass rescheduling issues (e.g. jobs - # getting dropped after too many retries) that may occur when - # MigrateMergeRequestDiffCommitUsers jobs take longer than expected. - class StealMigrateMergeRequestDiffCommitUsers - def perform(start_id, stop_id) - MigrateMergeRequestDiffCommitUsers.new.perform(start_id, stop_id) - schedule_next_job - end - - def schedule_next_job - next_job = Database::BackgroundMigrationJob - .for_migration_class('MigrateMergeRequestDiffCommitUsers') - .pending - .first - - return unless next_job - - BackgroundMigrationWorker.perform_in( - 5.minutes, - 'StealMigrateMergeRequestDiffCommitUsers', - next_job.arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/update_timelogs_null_spent_at.rb b/lib/gitlab/background_migration/update_timelogs_null_spent_at.rb deleted file mode 100644 index b61f2ee7f4c..00000000000 --- a/lib/gitlab/background_migration/update_timelogs_null_spent_at.rb +++ /dev/null @@ -1,39 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Class to populate spent_at for timelogs - class UpdateTimelogsNullSpentAt - include Gitlab::Database::DynamicModelHelpers - - BATCH_SIZE = 100 - - def 
perform(start_id, stop_id) - define_batchable_model('timelogs', connection: connection) - .where(spent_at: nil, id: start_id..stop_id) - .each_batch(of: 100) do |subbatch| - batch_start, batch_end = subbatch.pick('min(id), max(id)') - - update_timelogs(batch_start, batch_end) - end - end - - def update_timelogs(batch_start, batch_stop) - execute(<<~SQL) - UPDATE timelogs - SET spent_at = created_at - WHERE spent_at IS NULL - AND timelogs.id BETWEEN #{batch_start} AND #{batch_stop}; - SQL - end - - def connection - @connection ||= ApplicationRecord.connection - end - - def execute(sql) - connection.execute(sql) - end - end - end -end diff --git a/lib/gitlab/background_migration/update_timelogs_project_id.rb b/lib/gitlab/background_migration/update_timelogs_project_id.rb deleted file mode 100644 index 69bb5cf6e6d..00000000000 --- a/lib/gitlab/background_migration/update_timelogs_project_id.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # Class to populate project_id for timelogs - class UpdateTimelogsProjectId - BATCH_SIZE = 1000 - - def perform(start_id, stop_id) - (start_id..stop_id).step(BATCH_SIZE).each do |offset| - update_issue_timelogs(offset, offset + BATCH_SIZE) - update_merge_request_timelogs(offset, offset + BATCH_SIZE) - end - end - - def update_issue_timelogs(batch_start, batch_stop) - execute(<<~SQL) - UPDATE timelogs - SET project_id = issues.project_id - FROM issues - WHERE issues.id = timelogs.issue_id - AND timelogs.id BETWEEN #{batch_start} AND #{batch_stop} - AND timelogs.project_id IS NULL; - SQL - end - - def update_merge_request_timelogs(batch_start, batch_stop) - execute(<<~SQL) - UPDATE timelogs - SET project_id = merge_requests.target_project_id - FROM merge_requests - WHERE merge_requests.id = timelogs.merge_request_id - AND timelogs.id BETWEEN #{batch_start} AND #{batch_stop} - AND timelogs.project_id IS NULL; - SQL - end - - def execute(sql) - @connection ||= 
ApplicationRecord.connection - @connection.execute(sql) - end - end - end -end diff --git a/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group.rb b/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group.rb deleted file mode 100644 index 10db9f5064a..00000000000 --- a/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group.rb +++ /dev/null @@ -1,129 +0,0 @@ -# frozen_string_literal: true -# rubocop:disable Style/Documentation - -module Gitlab - module BackgroundMigration - class UpdateUsersWhereTwoFactorAuthRequiredFromGroup # rubocop:disable Metrics/ClassLength - def perform(start_id, stop_id) - ApplicationRecord.connection.execute <<~SQL - UPDATE - users - SET - require_two_factor_authentication_from_group = TRUE - WHERE - users.id BETWEEN #{start_id} - AND #{stop_id} - AND users.require_two_factor_authentication_from_group = FALSE - AND users.id IN ( - SELECT - DISTINCT users_groups_query.user_id - FROM - ( - SELECT - users.id AS user_id, - members.source_id AS group_ids - FROM - users - LEFT JOIN members ON members.source_type = 'Namespace' - AND members.requested_at IS NULL - AND members.user_id = users.id - AND members.type = 'GroupMember' - WHERE - users.require_two_factor_authentication_from_group = FALSE - AND users.id BETWEEN #{start_id} - AND #{stop_id}) AS users_groups_query - INNER JOIN LATERAL ( - WITH RECURSIVE "base_and_ancestors" AS ( - ( - SELECT - "namespaces"."type", - "namespaces"."id", - "namespaces"."parent_id", - "namespaces"."require_two_factor_authentication" - FROM - "namespaces" - WHERE - "namespaces"."type" = 'Group' - AND "namespaces"."id" = users_groups_query.group_ids - ) - UNION - ( - SELECT - "namespaces"."type", - "namespaces"."id", - "namespaces"."parent_id", - "namespaces"."require_two_factor_authentication" - FROM - "namespaces", - "base_and_ancestors" - WHERE - "namespaces"."type" = 'Group' - AND "namespaces"."id" = 
"base_and_ancestors"."parent_id" - ) - ), - "base_and_descendants" AS ( - ( - SELECT - "namespaces"."type", - "namespaces"."id", - "namespaces"."parent_id", - "namespaces"."require_two_factor_authentication" - FROM - "namespaces" - WHERE - "namespaces"."type" = 'Group' - AND "namespaces"."id" = users_groups_query.group_ids - ) - UNION - ( - SELECT - "namespaces"."type", - "namespaces"."id", - "namespaces"."parent_id", - "namespaces"."require_two_factor_authentication" - FROM - "namespaces", - "base_and_descendants" - WHERE - "namespaces"."type" = 'Group' - AND "namespaces"."parent_id" = "base_and_descendants"."id" - ) - ) - SELECT - "namespaces".* - FROM - ( - ( - SELECT - "namespaces"."type", - "namespaces"."id", - "namespaces"."parent_id", - "namespaces"."require_two_factor_authentication" - FROM - "base_and_ancestors" AS "namespaces" - WHERE - "namespaces"."type" = 'Group' - ) - UNION - ( - SELECT - "namespaces"."type", - "namespaces"."id", - "namespaces"."parent_id", - "namespaces"."require_two_factor_authentication" - FROM - "base_and_descendants" AS "namespaces" - WHERE - "namespaces"."type" = 'Group' - ) - ) namespaces - WHERE - "namespaces"."type" = 'Group' - AND "namespaces"."require_two_factor_authentication" = TRUE - ) AS hierarchy_tree ON TRUE - ); - SQL - end - end - end -end diff --git a/lib/gitlab/background_migration/update_vulnerability_occurrences_location.rb b/lib/gitlab/background_migration/update_vulnerability_occurrences_location.rb deleted file mode 100644 index 458e0537f1c..00000000000 --- a/lib/gitlab/background_migration/update_vulnerability_occurrences_location.rb +++ /dev/null @@ -1,14 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # rubocop: disable Style/Documentation - class UpdateVulnerabilityOccurrencesLocation - def perform(start_id, stop_id) - end - end - # rubocop: enable Style/Documentation - end -end - 
-Gitlab::BackgroundMigration::UpdateVulnerabilityOccurrencesLocation.prepend_mod_with('Gitlab::BackgroundMigration::UpdateVulnerabilityOccurrencesLocation') diff --git a/lib/gitlab/bullet/exclusions.rb b/lib/gitlab/bullet/exclusions.rb index f897ff492d9..406d0a80a07 100644 --- a/lib/gitlab/bullet/exclusions.rb +++ b/lib/gitlab/bullet/exclusions.rb @@ -27,7 +27,8 @@ module Gitlab def exclusions @exclusions ||= if File.exist?(config_file) - YAML.load_file(config_file)['exclusions']&.values || [] + config = YAML.safe_load_file(config_file, permitted_classes: [Range]) + config['exclusions']&.values || [] else [] end diff --git a/lib/gitlab/cache/client.rb b/lib/gitlab/cache/client.rb index ac710ee0adf..37d6cac8d43 100644 --- a/lib/gitlab/cache/client.rb +++ b/lib/gitlab/cache/client.rb @@ -11,17 +11,14 @@ module Gitlab # @param cache_identifier [String] defines the location of the cache definition # Example: "ProtectedBranches::CacheService#fetch" # @param feature_category [Symbol] name of the feature category (from config/feature_categories.yml) - # @param caller_id [String] caller id from labkit context # @param backing_resource [Symbol] most affected resource by cache generation (full list: VALID_BACKING_RESOURCES) # @return [Gitlab::Cache::Client] def self.build_with_metadata( cache_identifier:, feature_category:, - caller_id: Gitlab::ApplicationContext.current_context_attribute(:caller_id), backing_resource: DEFAULT_BACKING_RESOURCE ) new(Metadata.new( - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource diff --git a/lib/gitlab/cache/metadata.rb b/lib/gitlab/cache/metadata.rb index 224f215ef82..de35b332300 100644 --- a/lib/gitlab/cache/metadata.rb +++ b/lib/gitlab/cache/metadata.rb @@ -9,22 +9,19 @@ module Gitlab # @param cache_identifier [String] defines the location of the cache definition # Example: "ProtectedBranches::CacheService#fetch" # @param feature_category [Symbol] name of the 
feature category (from config/feature_categories.yml) - # @param caller_id [String] caller id from labkit context # @param backing_resource [Symbol] most affected resource by cache generation (full list: VALID_BACKING_RESOURCES) # @return [Gitlab::Cache::Metadata] def initialize( cache_identifier:, feature_category:, - caller_id: Gitlab::ApplicationContext.current_context_attribute(:caller_id), backing_resource: Client::DEFAULT_BACKING_RESOURCE ) @cache_identifier = cache_identifier @feature_category = Gitlab::FeatureCategories.default.get!(feature_category) - @caller_id = caller_id @backing_resource = fetch_backing_resource!(backing_resource) end - attr_reader :caller_id, :cache_identifier, :feature_category, :backing_resource + attr_reader :cache_identifier, :feature_category, :backing_resource private diff --git a/lib/gitlab/cache/metrics.rb b/lib/gitlab/cache/metrics.rb index 00d4e6e4d4e..d9c80f076b9 100644 --- a/lib/gitlab/cache/metrics.rb +++ b/lib/gitlab/cache/metrics.rb @@ -58,7 +58,6 @@ module Gitlab def labels @labels ||= { - caller_id: cache_metadata.caller_id, cache_identifier: cache_metadata.cache_identifier, feature_category: cache_metadata.feature_category, backing_resource: cache_metadata.backing_resource diff --git a/lib/gitlab/checks/matching_merge_request.rb b/lib/gitlab/checks/matching_merge_request.rb index e5ce862264f..15178597a99 100644 --- a/lib/gitlab/checks/matching_merge_request.rb +++ b/lib/gitlab/checks/matching_merge_request.rb @@ -17,7 +17,7 @@ module Gitlab # # 1. Sidekiq: MergeService runs and updates the merge request in a locked state. # 2. Gitaly: The UserMergeBranch RPC runs. - # 3. Gitaly (gitaly-ruby): This RPC calls the pre-receive hook. + # 3. Gitaly: The RPC calls the pre-receive hook. # 4. Rails: This hook makes an API request to /api/v4/internal/allowed. # 5. Rails: This API check does a SQL query for locked merge # requests with a matching SHA. 
diff --git a/lib/gitlab/ci/ansi2json.rb b/lib/gitlab/ci/ansi2json.rb index 79114d35916..70b68c7b821 100644 --- a/lib/gitlab/ci/ansi2json.rb +++ b/lib/gitlab/ci/ansi2json.rb @@ -4,8 +4,8 @@ module Gitlab module Ci module Ansi2json - def self.convert(ansi, state = nil) - Converter.new.convert(ansi, state) + def self.convert(ansi, state = nil, verify_state: false) + Converter.new.convert(ansi, state, verify_state: verify_state) end end end diff --git a/lib/gitlab/ci/ansi2json/converter.rb b/lib/gitlab/ci/ansi2json/converter.rb index 78f6c5bf0aa..84541208a2f 100644 --- a/lib/gitlab/ci/ansi2json/converter.rb +++ b/lib/gitlab/ci/ansi2json/converter.rb @@ -4,9 +4,13 @@ module Gitlab module Ci module Ansi2json class Converter - def convert(stream, new_state) + def convert(stream, new_state, verify_state: false) @lines = [] - @state = State.new(new_state, stream.size) + @state = if verify_state + SignedState.new(new_state, stream.size) + else + State.new(new_state, stream.size) + end append = false truncated = false diff --git a/lib/gitlab/ci/ansi2json/parser.rb b/lib/gitlab/ci/ansi2json/parser.rb index fdd49df1e24..1d26bceb7b1 100644 --- a/lib/gitlab/ci/ansi2json/parser.rb +++ b/lib/gitlab/ci/ansi2json/parser.rb @@ -9,14 +9,14 @@ module Gitlab class Parser # keys represent the trailing digit in color changing command (30-37, 40-47, 90-97. 100-107) COLOR = { - 0 => 'black', # not that this is gray in the intense color table + 0 => 'black', # Note: This is gray in the intense color table. 1 => 'red', 2 => 'green', 3 => 'yellow', 4 => 'blue', 5 => 'magenta', 6 => 'cyan', - 7 => 'white' # not that this is gray in the dark (aka default) color table + 7 => 'white' # Note: This is gray in the dark (aka default) color table. 
}.freeze STYLE_SWITCHES = { diff --git a/lib/gitlab/ci/ansi2json/signed_state.rb b/lib/gitlab/ci/ansi2json/signed_state.rb new file mode 100644 index 00000000000..98e2419f0a8 --- /dev/null +++ b/lib/gitlab/ci/ansi2json/signed_state.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'openssl' + +module Gitlab + module Ci + module Ansi2json + class SignedState < ::Gitlab::Ci::Ansi2json::State + include Gitlab::Utils::StrongMemoize + + SIGNATURE_KEY_SALT = 'gitlab-ci-ansi2json-state' + SEPARATOR = '--' + + def encode + encoded = super + + encoded + SEPARATOR + sign(encoded) + end + + private + + def sign(message) + ::OpenSSL::HMAC.hexdigest( + signature_digest, + signature_key, + message + ) + end + + def verify(signed_message) + signature_length = signature_digest.digest_length * 2 # a byte is exactly two hexadecimals + message_length = signed_message.length - SEPARATOR.length - signature_length + return if message_length <= 0 + + signature = signed_message.last(signature_length) + message = signed_message.first(message_length) + return unless valid_signature?(message, signature) + + message + end + + def valid_signature?(message, signature) + expected_signature = sign(message) + expected_signature.bytesize == signature.bytesize && + ::OpenSSL.fixed_length_secure_compare(signature, expected_signature) + end + + def decode_state(data) + return if data.blank? + + encoded_state = verify(data) + if encoded_state.blank? + ::Gitlab::AppLogger.warn(message: "#{self.class}: signature missing or invalid", invalid_state: data) + return + end + + decoded_state = Base64.urlsafe_decode64(encoded_state) + return unless decoded_state.present? 
+ + ::Gitlab::Json.parse(decoded_state) + end + + def signature_digest + ::OpenSSL::Digest.new('SHA256') + end + + def signature_key + ::Gitlab::Application.key_generator.generate_key(SIGNATURE_KEY_SALT, signature_digest.block_length) + end + strong_memoize_attr :signature_key + end + end + end +end diff --git a/lib/gitlab/ci/ansi2json/state.rb b/lib/gitlab/ci/ansi2json/state.rb index b2b6ce649ed..279e1929b22 100644 --- a/lib/gitlab/ci/ansi2json/state.rb +++ b/lib/gitlab/ci/ansi2json/state.rb @@ -18,12 +18,13 @@ module Gitlab end def encode - state = { + json = { offset: @last_line_offset, style: @current_line.style.to_h, open_sections: @open_sections - } - Base64.urlsafe_encode64(state.to_json) + }.to_json + + Base64.urlsafe_encode64(json, padding: false) end def open_section(section, timestamp, options) @@ -91,7 +92,20 @@ module Gitlab decoded_state = Base64.urlsafe_decode64(state) return unless decoded_state.present? - Gitlab::Json.parse(decoded_state) + ::Gitlab::Json.parse(decoded_state) + rescue ArgumentError, JSON::ParserError => error + # This rescue is so that we don't break during the rollout or rollback + # of `sign_and_verify_ansi2json_state`, because we may receive a + # signed state even when the flag is disabled, and this would result + # in invalid Base64 (ArgumentError) or invalid JSON in case the signed + # state happens to decode as valid Base64 (JSON::ParserError). + # + # Once the flag has been fully rolled out this should not + # be possible (it would imply a backend bug) and we will not rescue from + # this.
+ ::Gitlab::AppLogger.warn(message: "#{self.class}: decode error", invalid_state: state, error: error) + + nil end end end diff --git a/lib/gitlab/ci/build/cache.rb b/lib/gitlab/ci/build/cache.rb index 1cddc9fcc98..3432ecdb250 100644 --- a/lib/gitlab/ci/build/cache.rb +++ b/lib/gitlab/ci/build/cache.rb @@ -9,8 +9,15 @@ module Gitlab def initialize(cache, pipeline) cache = Array.wrap(cache) @cache = cache.map.with_index do |cache, index| - Gitlab::Ci::Pipeline::Seed::Build::Cache - .new(pipeline, cache, index) + if Feature.enabled?(:ci_fix_for_runner_cache_prefix) + prefix = cache_prefix(cache, index) + + Gitlab::Ci::Pipeline::Seed::Build::Cache + .new(pipeline, cache, prefix) + else + Gitlab::Ci::Pipeline::Seed::Build::Cache + .new(pipeline, cache, index) + end end end @@ -23,6 +30,18 @@ module Gitlab end end end + + private + + def cache_prefix(cache, index) + files = cache.dig(:key, :files) if cache.is_a?(Hash) && cache[:key].is_a?(Hash) + + return index if files.blank? + + filenames = files.map { |file| file.split('.').first }.join('_') + + "#{index}_#{filenames}" + end end end end diff --git a/lib/gitlab/ci/components/header.rb b/lib/gitlab/ci/components/header.rb deleted file mode 100644 index 732874d7a88..00000000000 --- a/lib/gitlab/ci/components/header.rb +++ /dev/null @@ -1,42 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Ci - module Components - ## - # Components::Header class represents full component specification that is being prepended as first YAML document - # in the CI Component file. - # - class Header - attr_reader :errors - - def initialize(header) - @header = header - @errors = [] - end - - def empty? - inputs_spec.to_h.empty? - end - - def inputs(args) - @input ||= Ci::Input::Inputs.new(inputs_spec, args) - end - - def context(args) - inputs(args).then do |input| - raise ArgumentError unless input.valid? 
- - Ci::Interpolation::Context.new({ inputs: input.to_hash }) - end - end - - private - - def inputs_spec - @header.dig(:spec, :inputs) - end - end - end - end -end diff --git a/lib/gitlab/ci/components/instance_path.rb b/lib/gitlab/ci/components/instance_path.rb index 010ce57d2a0..27a7611ffdd 100644 --- a/lib/gitlab/ci/components/instance_path.rb +++ b/lib/gitlab/ci/components/instance_path.rb @@ -6,6 +6,8 @@ module Gitlab class InstancePath include Gitlab::Utils::StrongMemoize + LATEST_VERSION_KEYWORD = '~latest' + def self.match?(address) address.include?('@') && address.start_with?(Settings.gitlab_ci['component_fqdn']) end @@ -39,9 +41,9 @@ module Gitlab File.join(component_dir, @content_filename).delete_prefix('/') end - # TODO: Add support when version is a released tag and "~latest" moving target def sha return unless project + return latest_version_sha if version == LATEST_VERSION_KEYWORD project.commit(version)&.id end @@ -69,6 +71,12 @@ module Gitlab ::Project.where_full_path_in(possible_paths).take # rubocop: disable CodeReuse/ActiveRecord end + + def latest_version_sha + return unless catalog_resource = project&.catalog_resource + + catalog_resource.latest_version&.sha + end end end end diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb index 534b84afc23..0c293c3f0ef 100644 --- a/lib/gitlab/ci/config.rb +++ b/lib/gitlab/ci/config.rb @@ -9,7 +9,7 @@ module Gitlab include Gitlab::Utils::StrongMemoize ConfigError = Class.new(StandardError) - TIMEOUT_SECONDS = 30.seconds + TIMEOUT_SECONDS = ENV.fetch('GITLAB_CI_CONFIG_FETCH_TIMEOUT_SECONDS', 30).to_i.clamp(0, 60).seconds TIMEOUT_MESSAGE = 'Request timed out when fetching configuration files.' 
RESCUE_ERRORS = [ diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb index 2390ba05916..d31d1b366c3 100644 --- a/lib/gitlab/ci/config/entry/job.rb +++ b/lib/gitlab/ci/config/entry/job.rb @@ -14,7 +14,7 @@ module Gitlab ALLOWED_KEYS = %i[tags script image services start_in artifacts cache dependencies before_script after_script hooks environment coverage retry parallel interruptible timeout - release id_tokens].freeze + release id_tokens publish].freeze validations do validates :config, allowed_keys: Gitlab::Ci::Config::Entry::Job.allowed_keys + PROCESSABLE_ALLOWED_KEYS @@ -45,6 +45,8 @@ module Gitlab errors.add(:dependencies, "the #{missing_needs.join(", ")} should be part of needs") if missing_needs.any? end end + + validates :publish, absence: { message: "can only be used within a `pages` job" }, unless: -> { pages_job? } end entry :before_script, Entry::Commands, @@ -125,10 +127,14 @@ module Gitlab inherit: false, metadata: { composable_class: ::Gitlab::Ci::Config::Entry::IdToken } + entry :publish, Entry::Publish, + description: 'Path to be published with Pages', + inherit: false + attributes :script, :tags, :when, :dependencies, :needs, :retry, :parallel, :start_in, :interruptible, :timeout, - :release, :allow_failure + :release, :allow_failure, :publish def self.matching?(name, config) !name.to_s.start_with?('.') && @@ -169,7 +175,8 @@ module Gitlab allow_failure_criteria: allow_failure_criteria, needs: needs_defined? ? needs_value : nil, scheduling_type: needs_defined? ? :dag : :stage, - id_tokens: id_tokens_value + id_tokens: id_tokens_value, + publish: publish ).compact end @@ -177,6 +184,10 @@ module Gitlab allow_failure_defined? ? static_allow_failure : manual_action? end + def pages_job? 
+ name == :pages + end + def self.allowed_keys ALLOWED_KEYS end diff --git a/lib/gitlab/ci/config/entry/publish.rb b/lib/gitlab/ci/config/entry/publish.rb new file mode 100644 index 00000000000..52a2487009e --- /dev/null +++ b/lib/gitlab/ci/config/entry/publish.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + class Config + module Entry + ## + # Entry that represents the path to be published with Pages. + # + class Publish < ::Gitlab::Config::Entry::Node + include ::Gitlab::Config::Entry::Validatable + + validations do + validates :config, type: String + end + + def self.default + 'public' + end + end + end + end + end +end diff --git a/lib/gitlab/ci/config/external/file/artifact.rb b/lib/gitlab/ci/config/external/file/artifact.rb index 0b90d240a15..273d78bd583 100644 --- a/lib/gitlab/ci/config/external/file/artifact.rb +++ b/lib/gitlab/ci/config/external/file/artifact.rb @@ -22,7 +22,7 @@ module Gitlab strong_memoize(:content) do Gitlab::Ci::ArtifactFileReader.new(artifact_job).read(location) rescue Gitlab::Ci::ArtifactFileReader::Error => error - errors.push(error.message) + errors.push(error.message) # TODO this memoizes the error message as a content! end end diff --git a/lib/gitlab/ci/config/external/file/base.rb b/lib/gitlab/ci/config/external/file/base.rb index 7060754a670..553f2a2d754 100644 --- a/lib/gitlab/ci/config/external/file/base.rb +++ b/lib/gitlab/ci/config/external/file/base.rb @@ -61,18 +61,6 @@ module Gitlab [params, context.project&.full_path, context.sha].hash end - def load_and_validate_expanded_hash! - context.logger.instrument(:config_file_fetch_content_hash) do - content_hash # calling the method loads then memoizes the result - end - - context.logger.instrument(:config_file_expand_content_includes) do - expanded_content_hash # calling the method expands then memoizes the result - end - - validate_hash! 
- end - # This method is overridden to load context into the memoized result # or to lazily load context via BatchLoader def preload_context @@ -94,32 +82,59 @@ module Gitlab end def validate_context! - raise NotImplementedError, 'subclass must implement validate_context' + raise NotImplementedError, 'subclass must implement `validate_context!`' end def validate_content! - if content.blank? - errors.push("Included file `#{masked_location}` is empty or does not exist!") + errors.push("Included file `#{masked_location}` is empty or does not exist!") if content.blank? + end + + def load_and_validate_expanded_hash! + context.logger.instrument(:config_file_fetch_content_hash) do + content_result # calling the method loads YAML then memoizes the content result + end + + context.logger.instrument(:config_file_interpolate_result) do + interpolator.interpolate! + end + + return validate_interpolation! unless interpolator.valid? + + context.logger.instrument(:config_file_expand_content_includes) do + expanded_content_hash # calling the method expands then memoizes the result end + + validate_hash! end protected def content_result - strong_memoize(:content_hash) do - ::Gitlab::Ci::Config::Yaml - .load_result!(content, project: context.project) - end + ::Gitlab::Ci::Config::Yaml + .load_result!(content, project: context.project) + end + strong_memoize_attr :content_result + + def content_inputs + params.to_h[:with] end + strong_memoize_attr :content_inputs def content_hash - return unless content_result.valid? + interpolator.interpolate! + + interpolator.to_hash + end + strong_memoize_attr :content_hash - content_result.content + def interpolator + External::Interpolator + .new(content_result, content_inputs, context) end + strong_memoize_attr :interpolator def expanded_content_hash - return unless content_hash + return if content_hash.blank? 
strong_memoize(:expanded_content_hash) do expand_includes(content_hash) @@ -132,6 +147,12 @@ module Gitlab end end + def validate_interpolation! + return if interpolator.valid? + + errors.push("`#{masked_location}`: #{interpolator.error_message}") + end + def expand_includes(hash) External::Processor.new(hash, context.mutate(expand_context_attrs)).perform end diff --git a/lib/gitlab/ci/config/external/file/component.rb b/lib/gitlab/ci/config/external/file/component.rb index 7ab7dc3d64e..9679d78a1aa 100644 --- a/lib/gitlab/ci/config/external/file/component.rb +++ b/lib/gitlab/ci/config/external/file/component.rb @@ -11,6 +11,7 @@ module Gitlab def initialize(params, context) @location = params[:component] + super end @@ -48,9 +49,7 @@ module Gitlab end def validate_content! - return if content.present? - - errors.push(component_result.message) + errors.push(component_result.message) unless content.present? end private end diff --git a/lib/gitlab/ci/config/external/interpolator.rb b/lib/gitlab/ci/config/external/interpolator.rb new file mode 100644 index 00000000000..5629c4a9766 --- /dev/null +++ b/lib/gitlab/ci/config/external/interpolator.rb @@ -0,0 +1,123 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + class Config + module External + ## + # Config::External::Interpolator performs includable file interpolation, and surfaces all possible interpolation + # errors. It is designed to provide an external file's validation context too. + # + class Interpolator + include ::Gitlab::Utils::StrongMemoize + + attr_reader :config, :args, :ctx, :errors + + def initialize(config, args, ctx = nil) + @config = config + @args = args.to_h + @ctx = ctx + @errors = [] + + validate! + end + + def valid? + @errors.none? + end + + def ready? + ## + # Interpolation is ready when it has been either interrupted by an error or finished with a result. + # + @result || @errors.any? + end + + def interpolate? + enabled? && has_header? && valid? + end + + def has_header?
+ config.has_header? && config.header.present? + end + + def to_hash + @result.to_h + end + + def error_message + # Interpolator can have multiple error messages, like: ["interpolation interrupted by errors", "unknown + # interpolation key: `abc`"] ? + # + # We are joining them together into a single one, because only one error can be surfaced when an external + # file gets included and is invalid. The limit to three error messages combined is more than required. + # + @errors.first(3).join(', ') + end + + ## + # TODO Add `instrument.logger` instrumentation blocks: + # https://gitlab.com/gitlab-org/gitlab/-/issues/396722 + # + def interpolate! + return {} unless valid? + return @result ||= content.to_h unless interpolate? + + return @errors.concat(header.errors) unless header.valid? + return @errors.concat(inputs.errors) unless inputs.valid? + return @errors.concat(context.errors) unless context.valid? + return @errors.concat(template.errors) unless template.valid? + + @result ||= template.interpolated.to_h.deep_symbolize_keys + end + strong_memoize_attr :interpolate! + + private + + def validate! + return errors.push('content does not have a valid YAML syntax') unless config.valid? + + return unless has_header? && !enabled? + + errors.push('can not evaluate included file because interpolation is disabled') + end + + def enabled? + return false if ctx.nil? + + ::Feature.enabled?(:ci_includable_files_interpolation, ctx.project) + end + + def header + @entry ||= Ci::Config::Header::Root.new(config.header).tap do |header| + header.key = 'header' + + header.compose! 
+ end + end + + def content + @content ||= config.content + end + + def spec + @spec ||= header.inputs_value + end + + def inputs + @inputs ||= Ci::Input::Inputs.new(spec, args) + end + + def context + @context ||= Ci::Interpolation::Context.new({ inputs: inputs.to_hash }) + end + + def template + @template ||= ::Gitlab::Ci::Interpolation::Template + .new(content, context) + end + end + end + end + end +end diff --git a/lib/gitlab/ci/config/external/mapper/matcher.rb b/lib/gitlab/ci/config/external/mapper/matcher.rb index e59eaa6d324..5072d0971cf 100644 --- a/lib/gitlab/ci/config/external/mapper/matcher.rb +++ b/lib/gitlab/ci/config/external/mapper/matcher.rb @@ -7,22 +7,13 @@ module Gitlab class Mapper # Matches the first file type that matches the given location class Matcher < Base - FILE_CLASSES = [ - External::File::Local, - External::File::Project, - External::File::Component, - External::File::Remote, - External::File::Template, - External::File::Artifact - ].freeze - - FILE_SUBKEYS = FILE_CLASSES.map { |f| f.name.demodulize.downcase }.freeze + include Gitlab::Utils::StrongMemoize private def process_without_instrumentation(locations) locations.map do |location| - matching = FILE_CLASSES.map do |file_class| + matching = file_classes.map do |file_class| file_class.new(location, context) end.select(&:matching?) @@ -31,10 +22,10 @@ module Gitlab elsif matching.empty? raise Mapper::AmbigiousSpecificationError, "`#{masked_location(location.to_json)}` does not have a valid subkey for include. 
" \ - "Valid subkeys are: `#{FILE_SUBKEYS.join('`, `')}`" + "Valid subkeys are: `#{file_subkeys.join('`, `')}`" else raise Mapper::AmbigiousSpecificationError, - "Each include must use only one of: `#{FILE_SUBKEYS.join('`, `')}`" + "Each include must use only one of: `#{file_subkeys.join('`, `')}`" end end end @@ -42,6 +33,28 @@ module Gitlab def masked_location(location) context.mask_variables_from(location) end + + def file_subkeys + file_classes.map { |f| f.name.demodulize.downcase }.freeze + end + strong_memoize_attr :file_subkeys + + def file_classes + classes = [ + External::File::Local, + External::File::Project, + External::File::Remote, + External::File::Template, + External::File::Artifact + ] + + if Feature.enabled?(:ci_include_components, context.project&.root_namespace) + classes << External::File::Component + end + + classes + end + strong_memoize_attr :file_classes end end end diff --git a/lib/gitlab/ci/config/header/input.rb b/lib/gitlab/ci/config/header/input.rb index 525b009afe3..7f0edaaac4c 100644 --- a/lib/gitlab/ci/config/header/input.rb +++ b/lib/gitlab/ci/config/header/input.rb @@ -6,6 +6,7 @@ module Gitlab module Header ## # Input parameter used for interpolation with the CI configuration. 
+ # class Input < ::Gitlab::Config::Entry::Node include ::Gitlab::Config::Entry::Validatable include ::Gitlab::Config::Entry::Attributable diff --git a/lib/gitlab/ci/config/header/spec.rb b/lib/gitlab/ci/config/header/spec.rb index 98d6d0d5783..4753c1eb441 100644 --- a/lib/gitlab/ci/config/header/spec.rb +++ b/lib/gitlab/ci/config/header/spec.rb @@ -10,7 +10,7 @@ module Gitlab ALLOWED_KEYS = %i[inputs].freeze validations do - validates :config, type: Hash, allowed_keys: ALLOWED_KEYS + validates :config, allowed_keys: ALLOWED_KEYS end entry :inputs, ::Gitlab::Config::Entry::ComposableHash, diff --git a/lib/gitlab/ci/config/yaml.rb b/lib/gitlab/ci/config/yaml.rb index d1b1b8caa5c..729e7e3ac05 100644 --- a/lib/gitlab/ci/config/yaml.rb +++ b/lib/gitlab/ci/config/yaml.rb @@ -20,7 +20,8 @@ module Gitlab ::Gitlab::Config::Loader::MultiDocYaml.new( content, max_documents: MAX_DOCUMENTS, - additional_permitted_classes: AVAILABLE_TAGS + additional_permitted_classes: AVAILABLE_TAGS, + reject_empty: true ).load! else ::Gitlab::Config::Loader::Yaml diff --git a/lib/gitlab/ci/config/yaml/result.rb b/lib/gitlab/ci/config/yaml/result.rb index 1a3ca53c161..33f9a454106 100644 --- a/lib/gitlab/ci/config/yaml/result.rb +++ b/lib/gitlab/ci/config/yaml/result.rb @@ -17,7 +17,9 @@ module Gitlab end def has_header? - @config.size > 1 + return false unless @config.first.is_a?(Hash) + + @config.size > 1 && @config.first.key?(:spec) end def header @@ -27,7 +29,9 @@ module Gitlab end def content - @config.last + return @config.last if has_header? + + @config.first end end end diff --git a/lib/gitlab/ci/input/arguments/default.rb b/lib/gitlab/ci/input/arguments/default.rb index fd61c1ab786..c6762b04870 100644 --- a/lib/gitlab/ci/input/arguments/default.rb +++ b/lib/gitlab/ci/input/arguments/default.rb @@ -9,7 +9,9 @@ module Gitlab # class Default < Input::Arguments::Base def validate! - error('invalid specification') unless default.present? 
+ return error('argument specification invalid') unless spec.key?(:default) + + error('invalid default value') unless default.is_a?(String) || default.nil? end ## @@ -35,6 +37,8 @@ module Gitlab end def self.matches?(spec) + return false unless spec.is_a?(Hash) + spec.count == 1 && spec.each_key.first == :default end end diff --git a/lib/gitlab/ci/input/arguments/options.rb b/lib/gitlab/ci/input/arguments/options.rb index debc89b10bd..855dab129be 100644 --- a/lib/gitlab/ci/input/arguments/options.rb +++ b/lib/gitlab/ci/input/arguments/options.rb @@ -25,7 +25,8 @@ module Gitlab # The configuration above will return an empty value. # def validate! - return error('argument specification invalid') if options.to_a.empty? + return error('argument specification invalid') unless options.is_a?(Array) + return error('options argument empty') if options.empty? if !value.nil? error("argument value #{value} not allowlisted") unless options.include?(value) @@ -43,6 +44,8 @@ module Gitlab end def self.matches?(spec) + return false unless spec.is_a?(Hash) + spec.count == 1 && spec.each_key.first == :options end end diff --git a/lib/gitlab/ci/input/arguments/required.rb b/lib/gitlab/ci/input/arguments/required.rb index b4e218ed29e..2e39f548731 100644 --- a/lib/gitlab/ci/input/arguments/required.rb +++ b/lib/gitlab/ci/input/arguments/required.rb @@ -28,7 +28,7 @@ module Gitlab # website: # ``` # - # An empty value, that has no specification is also considered as a "required" input, however we should + # An empty string value, that has no specification is also considered as a "required" input, however we should # never see that being used, because it will be rejected by Ci::Config::Header validation. # # ```yaml @@ -36,8 +36,17 @@ module Gitlab # inputs: # website: "" # ``` + # + # An empty hash value is also considered to be a required argument: + # + # ```yaml + # spec: + # inputs: + # website: {} + # ``` + # def self.matches?(spec) - spec.to_s.empty? + spec.blank? 
end end end diff --git a/lib/gitlab/ci/input/inputs.rb b/lib/gitlab/ci/input/inputs.rb index 743ae2ecf1e..1b544e63e7d 100644 --- a/lib/gitlab/ci/input/inputs.rb +++ b/lib/gitlab/ci/input/inputs.rb @@ -19,8 +19,8 @@ module Gitlab ].freeze def initialize(spec, args) - @spec = spec - @args = args + @spec = spec.to_h + @args = args.to_h @inputs = [] @errors = [] diff --git a/lib/gitlab/ci/interpolation/access.rb b/lib/gitlab/ci/interpolation/access.rb index 42598458902..f9bbd3e118d 100644 --- a/lib/gitlab/ci/interpolation/access.rb +++ b/lib/gitlab/ci/interpolation/access.rb @@ -45,7 +45,11 @@ module Gitlab raise ArgumentError, 'access path invalid' unless valid? @value ||= objects.inject(@ctx) do |memo, value| - memo.fetch(value.to_sym) + key = value.to_sym + + break @errors.push("unknown interpolation key: `#{key}`") unless memo.key?(key) + + memo.fetch(key) end rescue KeyError => e @errors.push(e) diff --git a/lib/gitlab/ci/interpolation/context.rb b/lib/gitlab/ci/interpolation/context.rb index ce7a86a3c9b..69c1fbb792c 100644 --- a/lib/gitlab/ci/interpolation/context.rb +++ b/lib/gitlab/ci/interpolation/context.rb @@ -38,6 +38,10 @@ module Gitlab @context.fetch(field) end + def key?(name) + @context.key?(name) + end + def to_h @context.to_h end @@ -53,7 +57,7 @@ module Gitlab end end - values.max + values.max.to_i end def self.fabricate(context) diff --git a/lib/gitlab/ci/jwt_v2.rb b/lib/gitlab/ci/jwt_v2.rb index cfefa79d9e0..fdff5035d37 100644 --- a/lib/gitlab/ci/jwt_v2.rb +++ b/lib/gitlab/ci/jwt_v2.rb @@ -20,11 +20,23 @@ module Gitlab attr_reader :aud def reserved_claims - super.merge( + super.merge({ iss: Settings.gitlab.base_url, sub: "project_path:#{project.full_path}:ref_type:#{ref_type}:ref:#{source_ref}", - aud: aud - ) + aud: aud, + user_identities: user_identities + }.compact) + end + + def user_identities + return unless user&.pass_user_identities_to_ci_jwt + + user.identities.map do |identity| + { + provider: identity.provider.to_s, + extern_uid: 
identity.extern_uid.to_s + } + end end end end diff --git a/lib/gitlab/ci/pipeline/seed/build.rb b/lib/gitlab/ci/pipeline/seed/build.rb index 484e18c6979..98f488d0f38 100644 --- a/lib/gitlab/ci/pipeline/seed/build.rb +++ b/lib/gitlab/ci/pipeline/seed/build.rb @@ -123,6 +123,7 @@ module Gitlab end @needs_attributes.flat_map do |need| + # We ignore the optional needed job in case it is excluded from the pipeline due to the job's rules. next if need[:optional] result = need_present?(need) diff --git a/lib/gitlab/ci/reports/security/finding.rb b/lib/gitlab/ci/reports/security/finding.rb index 45e67528f12..bf48c7d0bb7 100644 --- a/lib/gitlab/ci/reports/security/finding.rb +++ b/lib/gitlab/ci/reports/security/finding.rb @@ -190,6 +190,10 @@ module Gitlab original_data['assets'] || [] end + def raw_source_code_extract + original_data['raw_source_code_extract'] + end + # Returns either the max priority signature hex # or the location fingerprint def location_fingerprint diff --git a/lib/gitlab/ci/status/build/factory.rb b/lib/gitlab/ci/status/build/factory.rb index a4434e2c144..54f6784b847 100644 --- a/lib/gitlab/ci/status/build/factory.rb +++ b/lib/gitlab/ci/status/build/factory.rb @@ -11,12 +11,12 @@ module Gitlab Status::Build::Manual, Status::Build::Canceled, Status::Build::Created, - Status::Build::WaitingForResource, Status::Build::Preparing, Status::Build::Pending, Status::Build::Skipped, Status::Build::WaitingForApproval], - [Status::Build::Cancelable, + [Status::Build::WaitingForResource, + Status::Build::Cancelable, Status::Build::Retryable], [Status::Build::FailedUnmetPrerequisites, Status::Build::Failed], diff --git a/lib/gitlab/ci/status/composite.rb b/lib/gitlab/ci/status/composite.rb index 002bd846ab1..1ba78b357e5 100644 --- a/lib/gitlab/ci/status/composite.rb +++ b/lib/gitlab/ci/status/composite.rb @@ -8,17 +8,18 @@ module Gitlab # This class accepts an array of arrays/hashes/or objects # `with_allow_failure` will be removed when deleting 
ci_remove_ensure_stage_service - def initialize(all_statuses, with_allow_failure: true, dag: false) - unless all_statuses.respond_to?(:pluck) - raise ArgumentError, "all_statuses needs to respond to `.pluck`" + def initialize(all_jobs, with_allow_failure: true, dag: false, project: nil) + unless all_jobs.respond_to?(:pluck) + raise ArgumentError, "all_jobs needs to respond to `.pluck`" end @status_set = Set.new @status_key = 0 @allow_failure_key = 1 if with_allow_failure @dag = dag + @project = project - consume_all_statuses(all_statuses) + consume_all_jobs(all_jobs) end # The status calculation is order dependent, @@ -28,11 +29,13 @@ module Gitlab # based on what statuses are no longer valid based on the # data set that we have # - # This method is used for two cases: - # 1. When it is called for a stage or a pipeline (with `all_statuses` from all jobs in a stage or a pipeline), + # This method is used for three cases: + # 1. When it is called for a stage or a pipeline (with `all_jobs` from all jobs in a stage or a pipeline), # then, the returned status is assigned to the stage or pipeline. - # 2. When it is called for a job (with `all_statuses` from all previous jobs or all needed jobs), + # 2. When it is called for a job (with `all_jobs` from all previous jobs or all needed jobs), # then, the returned status is used to determine if the job is processed or not. + # 3. When it is called for a group (of jobs that are related), + # then, the returned status is used to show the overall status of the group. # rubocop: disable Metrics/CyclomaticComplexity # rubocop: disable Metrics/PerceivedComplexity def status @@ -42,9 +45,6 @@ module Gitlab if @dag && any_skipped_or_ignored? # The DAG job is skipped if one of the needs does not run at all. 
'skipped' - elsif @dag && !only_of?(:success, :failed, :canceled, :skipped, :success_with_warnings) - # DAG is blocked from executing if a dependent is not "complete" - 'pending' elsif only_of?(:skipped, :ignored) 'skipped' elsif only_of?(:success, :skipped, :success_with_warnings, :ignored) @@ -101,41 +101,41 @@ module Gitlab any_of?(:skipped) || any_of?(:ignored) end - def consume_all_statuses(all_statuses) + def consume_all_jobs(all_jobs) columns = [] columns[@status_key] = :status columns[@allow_failure_key] = :allow_failure if @allow_failure_key - all_statuses + all_jobs .pluck(*columns) # rubocop: disable CodeReuse/ActiveRecord - .each do |status_attrs| - consume_status(Array.wrap(status_attrs)) + .each do |job_attrs| + consume_job_status(Array.wrap(job_attrs)) end end - def consume_status(status_attrs) + def consume_job_status(job_attrs) status_result = - if success_with_warnings?(status_attrs) + if success_with_warnings?(job_attrs) :success_with_warnings - elsif ignored_status?(status_attrs) + elsif ignored_status?(job_attrs) :ignored else - status_attrs[@status_key].to_sym + job_attrs[@status_key].to_sym end @status_set.add(status_result) end - def success_with_warnings?(status) + def success_with_warnings?(job_attrs) @allow_failure_key && - status[@allow_failure_key] && - ::Ci::HasStatus::PASSED_WITH_WARNINGS_STATUSES.include?(status[@status_key]) + job_attrs[@allow_failure_key] && + ::Ci::HasStatus::PASSED_WITH_WARNINGS_STATUSES.include?(job_attrs[@status_key]) end - def ignored_status?(status) + def ignored_status?(job_attrs) @allow_failure_key && - status[@allow_failure_key] && - ::Ci::HasStatus::IGNORED_STATUSES.include?(status[@status_key]) + job_attrs[@allow_failure_key] && + ::Ci::HasStatus::IGNORED_STATUSES.include?(job_attrs[@status_key]) end end end diff --git a/lib/gitlab/ci/status/processable/waiting_for_resource.rb b/lib/gitlab/ci/status/processable/waiting_for_resource.rb index c9b1dd795d0..ac82c99b5f1 100644 --- 
a/lib/gitlab/ci/status/processable/waiting_for_resource.rb +++ b/lib/gitlab/ci/status/processable/waiting_for_resource.rb @@ -17,9 +17,39 @@ module Gitlab } end + def has_action? + current_processable.present? + end + + def action_icon + nil + end + + def action_title + nil + end + + def action_button_title + _('View job currently using resource') + end + + def action_path + project_job_path(subject.project, current_processable) + end + + def action_method + :get + end + def self.matches?(processable, _) processable.waiting_for_resource? end + + private + + def current_processable + @current_processable ||= subject.resource_group.current_processable + end end end end diff --git a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml index 2f7c16f0904..aeadc89095b 100644 --- a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml @@ -1,5 +1,5 @@ variables: - AUTO_BUILD_IMAGE_VERSION: 'v1.30.0' + AUTO_BUILD_IMAGE_VERSION: 'v1.31.0' build: stage: build diff --git a/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml index 2f7c16f0904..aeadc89095b 100644 --- a/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml @@ -1,5 +1,5 @@ variables: - AUTO_BUILD_IMAGE_VERSION: 'v1.30.0' + AUTO_BUILD_IMAGE_VERSION: 'v1.31.0' build: stage: build diff --git a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml index 47b79302828..b2ab6704e35 100644 --- a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml @@ -8,7 +8,7 @@ code_quality: variables: DOCKER_DRIVER: overlay2 DOCKER_TLS_CERTDIR: "" - CODE_QUALITY_IMAGE_TAG: "0.89.0" + CODE_QUALITY_IMAGE_TAG: "0.94.0" CODE_QUALITY_IMAGE: 
"$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/ci-cd/codequality:$CODE_QUALITY_IMAGE_TAG" needs: [] script: diff --git a/lib/gitlab/ci/templates/Jobs/Container-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Container-Scanning.gitlab-ci.yml index 7f8e2150c71..8063f3d1e69 100644 --- a/lib/gitlab/ci/templates/Jobs/Container-Scanning.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Container-Scanning.gitlab-ci.yml @@ -40,7 +40,7 @@ container_scanning: reports: container_scanning: gl-container-scanning-report.json dependency_scanning: gl-dependency-scanning-report.json - paths: [gl-container-scanning-report.json, gl-dependency-scanning-report.json] + paths: [gl-container-scanning-report.json, gl-dependency-scanning-report.json, "**/gl-sbom-*.cdx.json"] dependencies: [] script: - gtcs scan diff --git a/lib/gitlab/ci/templates/Jobs/Container-Scanning.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Container-Scanning.latest.gitlab-ci.yml index 15688da71ab..24c23ce89f3 100644 --- a/lib/gitlab/ci/templates/Jobs/Container-Scanning.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Container-Scanning.latest.gitlab-ci.yml @@ -40,12 +40,12 @@ container_scanning: reports: container_scanning: gl-container-scanning-report.json dependency_scanning: gl-dependency-scanning-report.json - paths: [gl-container-scanning-report.json, gl-dependency-scanning-report.json] + paths: [gl-container-scanning-report.json, gl-dependency-scanning-report.json, "**/gl-sbom-*.cdx.json"] dependencies: [] script: - gtcs scan rules: - - if: $CONTAINER_SCANNING_DISABLED + - if: $CONTAINER_SCANNING_DISABLED == 'true' || $CONTAINER_SCANNING_DISABLED == '1' when: never # Add the job to merge request pipelines if there's an open merge request. 
diff --git a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml index 61c2b468899..e336f69a7f6 100644 --- a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml @@ -1,5 +1,5 @@ variables: - DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.47.0' + DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.48.0' .dast-auto-deploy: image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}" diff --git a/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml index 31d19779434..2196630296b 100644 --- a/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml @@ -63,6 +63,7 @@ dependency_scanning: - '**/npm-shrinkwrap.json' - '**/package-lock.json' - '**/yarn.lock' + - '**/pnpm-lock.yaml' - '**/packages.lock.json' - '**/conan.lock' diff --git a/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.latest.gitlab-ci.yml index 9ab17997c27..46161dce74c 100644 --- a/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.latest.gitlab-ci.yml @@ -63,6 +63,7 @@ dependency_scanning: - '**/npm-shrinkwrap.json' - '**/package-lock.json' - '**/yarn.lock' + - '**/pnpm-lock.yaml' - '**/packages.lock.json' - '**/conan.lock' @@ -74,7 +75,7 @@ gemnasium-dependency_scanning: DS_ANALYZER_NAME: "gemnasium" GEMNASIUM_LIBRARY_SCAN_ENABLED: "true" rules: - - if: $DEPENDENCY_SCANNING_DISABLED + - if: $DEPENDENCY_SCANNING_DISABLED == 'true' || $DEPENDENCY_SCANNING_DISABLED == '1' when: never - if: $DS_EXCLUDED_ANALYZERS =~ /gemnasium([^-]|$)/ when: never @@ -121,7 +122,7 @@ gemnasium-maven-dependency_scanning: 
variables: DS_ANALYZER_NAME: "gemnasium-maven" rules: - - if: $DEPENDENCY_SCANNING_DISABLED + - if: $DEPENDENCY_SCANNING_DISABLED == 'true' || $DEPENDENCY_SCANNING_DISABLED == '1' when: never - if: $DS_EXCLUDED_ANALYZERS =~ /gemnasium-maven/ when: never @@ -169,7 +170,7 @@ gemnasium-python-dependency_scanning: variables: DS_ANALYZER_NAME: "gemnasium-python" rules: - - if: $DEPENDENCY_SCANNING_DISABLED + - if: $DEPENDENCY_SCANNING_DISABLED == 'true' || $DEPENDENCY_SCANNING_DISABLED == '1' when: never - if: $DS_EXCLUDED_ANALYZERS =~ /gemnasium-python/ when: never diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml index 9bac82b660f..ea6216a9210 100644 --- a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml @@ -1,5 +1,5 @@ variables: - AUTO_DEPLOY_IMAGE_VERSION: 'v2.47.0' + AUTO_DEPLOY_IMAGE_VERSION: 'v2.48.0' .auto-deploy: image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}" diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml index ec43217792f..34560600c10 100644 --- a/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml @@ -1,5 +1,5 @@ variables: - AUTO_DEPLOY_IMAGE_VERSION: 'v2.47.0' + AUTO_DEPLOY_IMAGE_VERSION: 'v2.48.0' .auto-deploy: image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}" diff --git a/lib/gitlab/ci/templates/Jobs/License-Scanning.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/License-Scanning.latest.gitlab-ci.yml index e47f669c2e2..8e1b0159cb0 100644 --- a/lib/gitlab/ci/templates/Jobs/License-Scanning.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/License-Scanning.latest.gitlab-ci.yml @@ -32,7 +32,7 @@ license_scanning: license_scanning: 
gl-license-scanning-report.json dependencies: [] rules: - - if: $LICENSE_MANAGEMENT_DISABLED + - if: $LICENSE_MANAGEMENT_DISABLED == 'true' || $LICENSE_MANAGEMENT_DISABLED == '1' when: never # Add the job to merge request pipelines if there's an open merge request. diff --git a/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml index 8b49d2de8cf..7b2e9e1222a 100644 --- a/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml @@ -1,7 +1,7 @@ # Read more about this feature here: https://docs.gitlab.com/ee/user/application_security/sast/ # # Configure SAST with CI/CD variables (https://docs.gitlab.com/ee/ci/variables/index.html). -# List of available variables: https://docs.gitlab.com/ee/user/application_security/sast/index.html#available-variables +# List of available variables: https://docs.gitlab.com/ee/user/application_security/sast/index.html#available-cicd-variables variables: # Setting this variable will affect all Security templates diff --git a/lib/gitlab/ci/templates/Python.gitlab-ci.yml b/lib/gitlab/ci/templates/Python.gitlab-ci.yml index febbb36d834..5797bcbaca9 100644 --- a/lib/gitlab/ci/templates/Python.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Python.gitlab-ci.yml @@ -32,7 +32,7 @@ test: script: - python setup.py test - pip install tox flake8 # you can also use tox - - tox -e py36,flake8 + - tox -e py,flake8 run: script: diff --git a/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml index 27bcc14bcf5..de8a21819cc 100644 --- a/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml @@ -44,13 +44,19 @@ dast: reports: dast: gl-dast-report.json rules: - - if: $DAST_DISABLED + - if: $DAST_DISABLED == 'true' || $DAST_DISABLED == '1' when: never - - if: $DAST_DISABLED_FOR_DEFAULT_BRANCH && + - if: $DAST_DISABLED_FOR_DEFAULT_BRANCH 
== 'true' && $CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME when: never + - if: $DAST_DISABLED_FOR_DEFAULT_BRANCH == '1' && + $CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME + when: never + - if: $CI_DEFAULT_BRANCH != $CI_COMMIT_REF_NAME && + $REVIEW_DISABLED == 'true' + when: never - if: $CI_DEFAULT_BRANCH != $CI_COMMIT_REF_NAME && - $REVIEW_DISABLED + $REVIEW_DISABLED == '1' when: never # Add the job to merge request pipelines if there's an open merge request. diff --git a/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml index bc23a7c2a95..3249bd2bcac 100644 --- a/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml @@ -52,6 +52,9 @@ cache: - gitlab-terraform plan-json resource_group: ${TF_STATE_NAME} artifacts: + # The next line, which disables public access to pipeline artifacts, may not be available everywhere. + # See: https://docs.gitlab.com/ee/ci/yaml/#artifactspublic + public: false paths: - ${TF_ROOT}/plan.cache reports: diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 32f64948635..a3f1b472710 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -166,13 +166,6 @@ module Gitlab end def destroy! - # TODO: Remove this logging once we confirmed new live trace architecture is functional. - # See https://gitlab.com/gitlab-com/gl-infra/infrastructure/issues/4667. - unless build.has_archived_trace? 
- Sidekiq.logger.warn(message: 'The job does not have archived trace but going to be destroyed.', - job_id: build.id) - end - trace_chunks.fast_destroy_all @tell = @size = 0 ensure diff --git a/lib/gitlab/ci/variables/builder.rb b/lib/gitlab/ci/variables/builder.rb index 89d681c418d..86e54fdfcdf 100644 --- a/lib/gitlab/ci/variables/builder.rb +++ b/lib/gitlab/ci/variables/builder.rb @@ -140,11 +140,13 @@ module Gitlab # Set environment name here so we can access it when evaluating the job's rules variables.append(key: 'CI_ENVIRONMENT_NAME', value: job.environment) if job.environment - # legacy variables - variables.append(key: 'CI_BUILD_NAME', value: job.name) - variables.append(key: 'CI_BUILD_STAGE', value: job.stage_name) - variables.append(key: 'CI_BUILD_TRIGGERED', value: 'true') if job.trigger_request - variables.append(key: 'CI_BUILD_MANUAL', value: 'true') if job.action? + if Feature.disabled?(:ci_remove_legacy_predefined_variables, project) + # legacy variables + variables.append(key: 'CI_BUILD_NAME', value: job.name) + variables.append(key: 'CI_BUILD_STAGE', value: job.stage_name) + variables.append(key: 'CI_BUILD_TRIGGERED', value: 'true') if job.trigger_request + variables.append(key: 'CI_BUILD_MANUAL', value: 'true') if job.action? + end end end diff --git a/lib/gitlab/ci/variables/builder/pipeline.rb b/lib/gitlab/ci/variables/builder/pipeline.rb index 96d6f1673b9..1e7a18d70b0 100644 --- a/lib/gitlab/ci/variables/builder/pipeline.rb +++ b/lib/gitlab/ci/variables/builder/pipeline.rb @@ -40,7 +40,7 @@ module Gitlab attr_reader :pipeline - def predefined_commit_variables + def predefined_commit_variables # rubocop:disable Metrics/AbcSize - Remove this rubocop:disable when FF `ci_remove_legacy_predefined_variables` is removed. Gitlab::Ci::Variables::Collection.new.tap do |variables| next variables unless pipeline.sha.present? 
@@ -57,7 +57,9 @@ module Gitlab variables.append(key: 'CI_COMMIT_TIMESTAMP', value: pipeline.git_commit_timestamp.to_s) variables.append(key: 'CI_COMMIT_AUTHOR', value: pipeline.git_author_full_text.to_s) - variables.concat(legacy_predefined_commit_variables) + if Feature.disabled?(:ci_remove_legacy_predefined_variables, pipeline.project) + variables.concat(legacy_predefined_commit_variables) + end end end strong_memoize_attr :predefined_commit_variables @@ -81,7 +83,9 @@ module Gitlab variables.append(key: 'CI_COMMIT_TAG', value: pipeline.ref) variables.append(key: 'CI_COMMIT_TAG_MESSAGE', value: git_tag.message) - variables.concat(legacy_predefined_commit_tag_variables) + if Feature.disabled?(:ci_remove_legacy_predefined_variables, pipeline.project) + variables.concat(legacy_predefined_commit_tag_variables) + end end end strong_memoize_attr :predefined_commit_tag_variables diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb index 59acfa80258..0f9e7daf4b8 100644 --- a/lib/gitlab/ci/yaml_processor.rb +++ b/lib/gitlab/ci/yaml_processor.rb @@ -99,7 +99,7 @@ module Gitlab validate_duplicate_needs!(name, needs) needs.each do |need| - validate_job_dependency!(name, need[:name], 'need') + validate_job_dependency!(name, need[:name], 'need', optional: need[:optional]) end end @@ -109,8 +109,13 @@ module Gitlab end end - def validate_job_dependency!(name, dependency, dependency_type = 'dependency') + def validate_job_dependency!(name, dependency, dependency_type = 'dependency', optional: false) unless @jobs[dependency.to_sym] + # Here, we ignore the optional needed job if it is not in the result YAML due to the `include` + # rules. In `lib/gitlab/ci/pipeline/seed/build.rb`, we use `optional` again to ignore the + # optional needed job in case it is excluded from the pipeline due to the job's rules. 
+ return if optional + error!("#{name} job: undefined #{dependency_type}: #{dependency}") end diff --git a/lib/gitlab/ci/yaml_processor/result.rb b/lib/gitlab/ci/yaml_processor/result.rb index d867439b10b..6207b595fc6 100644 --- a/lib/gitlab/ci/yaml_processor/result.rb +++ b/lib/gitlab/ci/yaml_processor/result.rb @@ -123,7 +123,8 @@ module Gitlab start_in: job[:start_in], trigger: job[:trigger], bridge_needs: job.dig(:needs, :bridge)&.first, - release: job[:release] + release: job[:release], + publish: job[:publish] }.compact }.compact end diff --git a/lib/gitlab/config/loader/multi_doc_yaml.rb b/lib/gitlab/config/loader/multi_doc_yaml.rb index 34080d26b7c..084d32a85bc 100644 --- a/lib/gitlab/config/loader/multi_doc_yaml.rb +++ b/lib/gitlab/config/loader/multi_doc_yaml.rb @@ -8,10 +8,11 @@ module Gitlab MULTI_DOC_DIVIDER = /^---\s+/.freeze - def initialize(config, max_documents:, additional_permitted_classes: []) + def initialize(config, max_documents:, additional_permitted_classes: [], reject_empty: false) @config = config @max_documents = max_documents @additional_permitted_classes = additional_permitted_classes + @reject_empty = reject_empty end def valid? @@ -28,7 +29,7 @@ module Gitlab private - attr_reader :config, :max_documents, :additional_permitted_classes + attr_reader :config, :max_documents, :additional_permitted_classes, :reject_empty # Valid YAML files can start with either a leading delimiter or no delimiter. # To avoid counting a leading delimiter towards the document limit, @@ -40,6 +41,7 @@ module Gitlab .map { |d| Yaml.new(d, additional_permitted_classes: additional_permitted_classes) } docs.shift if docs.first.blank? + docs.reject!(&:blank?) 
if reject_empty docs end strong_memoize_attr :documents diff --git a/lib/gitlab/content_security_policy/config_loader.rb b/lib/gitlab/content_security_policy/config_loader.rb index 477877e6a7c..ceca206b084 100644 --- a/lib/gitlab/content_security_policy/config_loader.rb +++ b/lib/gitlab/content_security_policy/config_loader.rb @@ -50,7 +50,6 @@ module Gitlab allow_sentry(directives) if Gitlab::CurrentSettings.try(:sentry_enabled) && Gitlab::CurrentSettings.try(:sentry_clientside_dsn) allow_framed_gitlab_paths(directives) allow_customersdot(directives) if ENV['CUSTOMER_PORTAL_URL'].present? - allow_kas(directives) allow_review_apps(directives) if ENV['REVIEW_APPS_ENABLED'] # The follow section contains workarounds to patch Safari's lack of support for CSP Level 3 @@ -148,17 +147,6 @@ module Gitlab append_to_directive(directives, 'frame_src', customersdot_host) end - def self.allow_kas(directives) - return unless ::Gitlab::Kas::UserAccess.enabled? - - kas_url = ::Gitlab::Kas.tunnel_url - return if URI(kas_url).host == ::Gitlab.config.gitlab.host # already allowed, no need for exception - - kas_url += '/' unless kas_url.end_with?('/') - - append_to_directive(directives, 'connect_src', kas_url) - end - def self.allow_legacy_sentry(directives) # Support for Sentry setup via configuration files will be removed in 16.0 # in favor of Gitlab::CurrentSettings. diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb index 756d0afa7e4..f77169f6d2b 100644 --- a/lib/gitlab/database.rb +++ b/lib/gitlab/database.rb @@ -51,6 +51,11 @@ module Gitlab FULLY_QUALIFIED_IDENTIFIER = /^\w+\.\w+$/ + ## Database Modes + MODE_SINGLE_DATABASE = "single-database" + MODE_SINGLE_DATABASE_CI_CONNECTION = "single-database-ci-connection" + MODE_MULTIPLE_DATABASES = "multiple-databases" + def self.database_base_models @database_base_models ||= { # Note that we use ActiveRecord::Base here and not ApplicationRecord. 
@@ -128,12 +133,29 @@ module Gitlab Gitlab::Runtime.max_threads + headroom end + # Database configured. Returns true even if the database is shared def self.has_config?(database_name) ActiveRecord::Base.configurations .configs_for(env_name: Rails.env, name: database_name.to_s, include_replicas: true) .present? end + # Database configured. Returns false if the database is shared + def self.has_database?(database_name) + db_config = ::Gitlab::Database.database_base_models[database_name]&.connection_db_config + db_config.present? && db_config_share_with(db_config).nil? + end + + def self.database_mode + if !has_config?(CI_DATABASE_NAME) + MODE_SINGLE_DATABASE + elsif has_database?(CI_DATABASE_NAME) + MODE_MULTIPLE_DATABASES + else + MODE_SINGLE_DATABASE_CI_CONNECTION + end + end + class PgUser < ApplicationRecord self.table_name = 'pg_user' self.primary_key = :usename diff --git a/lib/gitlab/database/async_indexes/migration_helpers.rb b/lib/gitlab/database/async_indexes/migration_helpers.rb index f459c43e0ee..d7128a20a0b 100644 --- a/lib/gitlab/database/async_indexes/migration_helpers.rb +++ b/lib/gitlab/database/async_indexes/migration_helpers.rb @@ -77,6 +77,35 @@ module Gitlab async_index end + def prepare_async_index_from_sql(definition) + Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode! + + return unless async_index_creation_available? 
+ + table_name, index_name = extract_table_and_index_names_from_concurrent_index!(definition) + + if index_name_exists?(table_name, index_name) + Gitlab::AppLogger.warn( + message: 'Index not prepared because it already exists', + table_name: table_name, + index_name: index_name) + + return + end + + async_index = Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.find_or_create_by!(name: index_name) do |rec| + rec.table_name = table_name + rec.definition = definition + end + + Gitlab::AppLogger.info( + message: 'Prepared index for async creation', + table_name: async_index.table_name, + index_name: async_index.name) + + async_index + end + # Prepares an index for asynchronous destruction. # # Stores the index information in the postgres_async_indexes table to be removed later. The @@ -110,7 +139,30 @@ module Gitlab end def async_index_creation_available? - connection.table_exists?(:postgres_async_indexes) + table_exists?(:postgres_async_indexes) + end + + private + + delegate :table_exists?, to: :connection, private: true + + def extract_table_and_index_names_from_concurrent_index!(definition) + statement = index_statement_from!(definition) + + raise 'Index statement not found!' unless statement + raise 'Index must be created concurrently!' unless statement.concurrent + raise 'Table does not exist!' unless table_exists?(statement.relation.relname) + + [statement.relation.relname, statement.idxname] + end + + # This raises `PgQuery::ParseError` if the given statement + # is syntactically incorrect, therefore, validates that the + # index definition is correct. 
+ def index_statement_from!(definition) + parsed_query = PgQuery.parse(definition) + + parsed_query.tree.stmts[0].stmt.index_stmt end end end diff --git a/lib/gitlab/database/background_migration/batched_job.rb b/lib/gitlab/database/background_migration/batched_job.rb index 5147ea92291..523ab2a9f27 100644 --- a/lib/gitlab/database/background_migration/batched_job.rb +++ b/lib/gitlab/database/background_migration/batched_job.rb @@ -130,8 +130,6 @@ module Gitlab end def can_reduce_sub_batch_size? - return false unless Feature.enabled?(:reduce_sub_batch_size_on_timeouts) - still_retryable? && within_batch_size_boundaries? end diff --git a/lib/gitlab/database/background_migration/batched_migration.rb b/lib/gitlab/database/background_migration/batched_migration.rb index 429dc79e170..a883996a5c5 100644 --- a/lib/gitlab/database/background_migration/batched_migration.rb +++ b/lib/gitlab/database/background_migration/batched_migration.rb @@ -25,6 +25,7 @@ module Gitlab scope :queue_order, -> { order(id: :asc) } scope :queued, -> { with_statuses(:active, :paused) } + scope :finalizing, -> { with_status(:finalizing) } scope :ordered_by_created_at_desc, -> { order(created_at: :desc) } # on_hold_until is a temporary runtime status which puts execution "on hold" @@ -219,7 +220,7 @@ module Gitlab end def health_context - HealthStatus::Context.new(connection, [table_name]) + HealthStatus::Context.new(connection, [table_name], gitlab_schema.to_sym) end def hold!(until_time: 10.minutes.from_now) diff --git a/lib/gitlab/database/background_migration/health_status.rb b/lib/gitlab/database/background_migration/health_status.rb index 506d2996ad5..c66f30ffecc 100644 --- a/lib/gitlab/database/background_migration/health_status.rb +++ b/lib/gitlab/database/background_migration/health_status.rb @@ -6,11 +6,12 @@ module Gitlab module HealthStatus DEFAULT_INIDICATORS = [ Indicators::AutovacuumActiveOnTable, - Indicators::WriteAheadLog + Indicators::WriteAheadLog, + Indicators::PatroniApdex 
].freeze # Rather than passing along the migration, we use a more explicitly defined context - Context = Struct.new(:connection, :tables) + Context = Struct.new(:connection, :tables, :gitlab_schema) def self.evaluate(migration, indicators = DEFAULT_INIDICATORS) indicators.map do |indicator| diff --git a/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex.rb b/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex.rb new file mode 100644 index 00000000000..0dd6dd5c2a4 --- /dev/null +++ b/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module BackgroundMigration + module HealthStatus + module Indicators + class PatroniApdex + include Gitlab::Utils::StrongMemoize + + def initialize(context) + @context = context + end + + def evaluate + return Signals::NotAvailable.new(self.class, reason: 'indicator disabled') unless enabled? + + connection_error_message = fetch_connection_error_message + return unknown_signal(connection_error_message) if connection_error_message.present? + + apdex_sli = fetch_sli(apdex_sli_query) + return unknown_signal('Patroni service apdex can not be calculated') unless apdex_sli.present? + + if apdex_sli.to_f > apdex_slo.to_f + Signals::Normal.new(self.class, reason: 'Patroni service apdex is above SLO') + else + Signals::Stop.new(self.class, reason: 'Patroni service apdex is below SLO') + end + end + + private + + attr_reader :context + + def enabled? + Feature.enabled?(:batched_migrations_health_status_patroni_apdex, type: :ops) + end + + def unknown_signal(reason) + Signals::Unknown.new(self.class, reason: reason) + end + + def fetch_connection_error_message + return 'Patroni Apdex Settings not configured' unless database_apdex_settings.present? + return 'Prometheus client is not ready' unless client.ready? 
+ return 'Apdex SLI query is not configured' unless apdex_sli_query + return 'Apdex SLO is not configured' unless apdex_slo + end + + def client + @client ||= Gitlab::PrometheusClient.new( + database_apdex_settings[:prometheus_api_url], + allow_local_requests: true, + verify: true + ) + end + + def database_apdex_settings + @database_apdex_settings ||= Gitlab::CurrentSettings.database_apdex_settings&.with_indifferent_access + end + + def apdex_sli_query + { + gitlab_main: database_apdex_settings[:apdex_sli_query][:main], + gitlab_ci: database_apdex_settings[:apdex_sli_query][:ci] + }.fetch(context.gitlab_schema.to_sym) + end + strong_memoize_attr :apdex_sli_query + + def apdex_slo + { + gitlab_main: database_apdex_settings[:apdex_slo][:main], + gitlab_ci: database_apdex_settings[:apdex_slo][:ci] + }.fetch(context.gitlab_schema.to_sym) + end + strong_memoize_attr :apdex_slo + + def fetch_sli(query) + response = client.query(query) + metric = response&.first || {} + value = metric.fetch('value', []) + + Array.wrap(value).second + end + end + end + end + end + end +end diff --git a/lib/gitlab/database/batch_count.rb b/lib/gitlab/database/batch_count.rb index 7a064fb4005..7249cb3e73b 100644 --- a/lib/gitlab/database/batch_count.rb +++ b/lib/gitlab/database/batch_count.rb @@ -27,7 +27,7 @@ # batch_sum(User, :sign_in_count) # batch_sum(Issue.group(:state_id), :weight)) # batch_average(Ci::Pipeline, :duration) -# batch_average(MergeTrain.group(:status), :duration) +# batch_average(MergeTrains::Car.group(:status), :duration) module Gitlab module Database module BatchCount diff --git a/lib/gitlab/database/load_balancing/action_cable_callbacks.rb b/lib/gitlab/database/load_balancing/action_cable_callbacks.rb index 7164976ff73..fab691117ad 100644 --- a/lib/gitlab/database/load_balancing/action_cable_callbacks.rb +++ b/lib/gitlab/database/load_balancing/action_cable_callbacks.rb @@ -6,14 +6,10 @@ module Gitlab module ActionCableCallbacks def self.install 
::ActionCable::Server::Worker.set_callback :work, :around, &wrapper - ::ActionCable::Channel::Base.set_callback :subscribe, :around, &wrapper - ::ActionCable::Channel::Base.set_callback :unsubscribe, :around, &wrapper end def self.wrapper lambda do |_, inner| - ::Gitlab::Database::LoadBalancing::Session.current.use_primary! - inner.call ensure ::Gitlab::Database::LoadBalancing.release_hosts diff --git a/lib/gitlab/database/load_balancing/connection_proxy.rb b/lib/gitlab/database/load_balancing/connection_proxy.rb index 622e310ead3..0d39b47dbba 100644 --- a/lib/gitlab/database/load_balancing/connection_proxy.rb +++ b/lib/gitlab/database/load_balancing/connection_proxy.rb @@ -32,6 +32,7 @@ module Gitlab select_one select_rows quote_column_name + schema_cache ).freeze # hosts - The hosts to use for load balancing. diff --git a/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb b/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb index f7b8d2514ba..95e21c40795 100644 --- a/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb +++ b/lib/gitlab/database/load_balancing/sidekiq_server_middleware.rb @@ -6,7 +6,7 @@ module Gitlab class SidekiqServerMiddleware JobReplicaNotUpToDate = Class.new(::Gitlab::SidekiqMiddleware::RetryError) - MINIMUM_DELAY_INTERVAL_SECONDS = 0.8 + REPLICA_WAIT_SLEEP_SECONDS = 0.5 def call(worker, job, _queue) worker_class = worker.class @@ -18,7 +18,7 @@ module Gitlab ::Gitlab::Database::LoadBalancing::Session.current.use_primary! elsif strategy == :retry raise JobReplicaNotUpToDate, "Sidekiq job #{worker_class} JID-#{job['jid']} couldn't use the replica."\ - " Replica was not up to date." + " Replica was not up to date." else # this means we selected an up-to-date replica, but there is nothing to do in this case. end @@ -49,7 +49,10 @@ module Gitlab # Happy case: we can read from a replica. 
return replica_strategy(worker_class, job) if databases_in_sync?(wal_locations) - sleep_if_needed(job) + 3.times do + sleep REPLICA_WAIT_SLEEP_SECONDS + break if databases_in_sync?(wal_locations) + end if databases_in_sync?(wal_locations) replica_strategy(worker_class, job) @@ -62,12 +65,6 @@ module Gitlab end end - def sleep_if_needed(job) - remaining_delay = MINIMUM_DELAY_INTERVAL_SECONDS - (Time.current.to_f - job['created_at'].to_f) - - sleep remaining_delay if remaining_delay > 0 && remaining_delay < MINIMUM_DELAY_INTERVAL_SECONDS - end - def get_wal_locations(job) job['dedup_wal_locations'] || job['wal_locations'] end @@ -79,7 +76,7 @@ module Gitlab end def can_retry?(worker_class, job) - worker_class.get_data_consistency == :delayed && not_yet_retried?(job) + worker_class.get_data_consistency == :delayed && not_yet_requeued?(job) end def replica_strategy(worker_class, job) @@ -87,10 +84,10 @@ module Gitlab end def retried_before?(worker_class, job) - worker_class.get_data_consistency == :delayed && !not_yet_retried?(job) + worker_class.get_data_consistency == :delayed && !not_yet_requeued?(job) end - def not_yet_retried?(job) + def not_yet_requeued?(job) # if `retry_count` is `nil` it indicates that this job was never retried # the `0` indicates that this is a first retry job['retry_count'].nil? diff --git a/lib/gitlab/database/lock_writes_manager.rb b/lib/gitlab/database/lock_writes_manager.rb index e8f7b51955d..7e429387ae6 100644 --- a/lib/gitlab/database/lock_writes_manager.rb +++ b/lib/gitlab/database/lock_writes_manager.rb @@ -38,7 +38,7 @@ module Gitlab def lock_writes if table_locked_for_writes? 
logger&.info "Skipping lock_writes, because #{table_name} is already locked for writes" - return + return result_hash(action: 'skipped') end logger&.info "Database: '#{database_name}', Table: '#{table_name}': Lock Writes".color(:yellow) @@ -50,6 +50,8 @@ module Gitlab SQL execute_sql_statement(sql_statement) + + result_hash(action: 'locked') end def unlock_writes @@ -59,6 +61,8 @@ module Gitlab SQL execute_sql_statement(sql_statement) + + result_hash(action: 'unlocked') end private @@ -113,6 +117,10 @@ module Gitlab def write_trigger_name "gitlab_schema_write_trigger_for_#{table_name_without_schema}" end + + def result_hash(action:) + { action: action, database: database_name, table: table_name, dry_run: dry_run } + end end end end diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb index 3a342abe65d..291f483e6e4 100644 --- a/lib/gitlab/database/migration_helpers.rb +++ b/lib/gitlab/database/migration_helpers.rb @@ -15,6 +15,7 @@ module Gitlab include RenameTableHelpers include AsyncIndexes::MigrationHelpers include AsyncConstraints::MigrationHelpers + include WraparoundVacuumHelpers def define_batchable_model(table_name, connection: self.connection) super(table_name, connection: connection) @@ -79,63 +80,6 @@ module Gitlab end end - # @deprecated Use `create_table` in V2 instead - # - # Creates a new table, optionally allowing the caller to add check constraints to the table. - # Aside from that addition, this method should behave identically to Rails' `create_table` method. - # - # Example: - # - # create_table_with_constraints :some_table do |t| - # t.integer :thing, null: false - # t.text :other_thing - # - # t.check_constraint :thing_is_not_null, 'thing IS NOT NULL' - # t.text_limit :other_thing, 255 - # end - # - # See Rails' `create_table` for more info on the available arguments. 
- def create_table_with_constraints(table_name, **options, &block) - helper_context = self - - with_lock_retries do - check_constraints = [] - - create_table(table_name, **options) do |t| - t.define_singleton_method(:check_constraint) do |name, definition| - helper_context.send(:validate_check_constraint_name!, name) # rubocop:disable GitlabSecurity/PublicSend - - check_constraints << { name: name, definition: definition } - end - - t.define_singleton_method(:text_limit) do |column_name, limit, name: nil| - # rubocop:disable GitlabSecurity/PublicSend - name = helper_context.send(:text_limit_name, table_name, column_name, name: name) - helper_context.send(:validate_check_constraint_name!, name) - # rubocop:enable GitlabSecurity/PublicSend - - column_name = helper_context.quote_column_name(column_name) - definition = "char_length(#{column_name}) <= #{limit}" - - check_constraints << { name: name, definition: definition } - end - - t.instance_eval(&block) unless block.nil? - end - - next if check_constraints.empty? - - constraint_clauses = check_constraints.map do |constraint| - "ADD CONSTRAINT #{quote_table_name(constraint[:name])} CHECK (#{constraint[:definition]})" - end - - execute(<<~SQL) - ALTER TABLE #{quote_table_name(table_name)} - #{constraint_clauses.join(",\n")} - SQL - end - end - # Creates a new index, concurrently # # Example: @@ -373,6 +317,13 @@ module Gitlab end end + # Since we may be migrating in one go from a previous version without + # `constrained_table_name` then we may see that this column exists + # (as above) but the schema cache is still outdated for the model. 
+ unless Gitlab::Database::PostgresForeignKey.column_names.include?('constrained_table_name') + Gitlab::Database::PostgresForeignKey.reset_column_information + end + fks = Gitlab::Database::PostgresForeignKey.by_constrained_table_name_or_identifier(source) fks = fks.by_referenced_table_name(target) if target diff --git a/lib/gitlab/database/migration_helpers/convert_to_bigint.rb b/lib/gitlab/database/migration_helpers/convert_to_bigint.rb index cf5640deb3d..63928d7dc09 100644 --- a/lib/gitlab/database/migration_helpers/convert_to_bigint.rb +++ b/lib/gitlab/database/migration_helpers/convert_to_bigint.rb @@ -11,7 +11,9 @@ module Gitlab # # Once we are done with the PK conversions we can remove this. def com_or_dev_or_test_but_not_jh? - !Gitlab.jh? && (Gitlab.com? || Gitlab.dev_or_test_env?) + return true if Gitlab.dev_or_test_env? + + Gitlab.com? && !Gitlab.jh? end end end diff --git a/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers.rb b/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers.rb index 30601bffd7a..2221aea9f46 100644 --- a/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers.rb +++ b/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers.rb @@ -9,11 +9,11 @@ module Gitlab DELETED_RECORDS_INSERT_FUNCTION_NAME = 'insert_into_loose_foreign_keys_deleted_records' def track_record_deletions(table) - execute(<<~SQL) - CREATE TRIGGER #{record_deletion_trigger_name(table)} - AFTER DELETE ON #{table} REFERENCING OLD TABLE AS old_table - FOR EACH STATEMENT - EXECUTE FUNCTION #{DELETED_RECORDS_INSERT_FUNCTION_NAME}(); + execute(<<~SQL.squish) + CREATE TRIGGER #{record_deletion_trigger_name(table)} + AFTER DELETE ON #{table} REFERENCING OLD TABLE AS old_table + FOR EACH STATEMENT + EXECUTE FUNCTION #{DELETED_RECORDS_INSERT_FUNCTION_NAME}(); SQL end @@ -21,6 +21,10 @@ module Gitlab drop_trigger(table, record_deletion_trigger_name(table)) end + def has_loose_foreign_key?(table) + trigger_exists?(table, 
record_deletion_trigger_name(table)) + end + private def record_deletion_trigger_name(table) diff --git a/lib/gitlab/database/migration_helpers/v2.rb b/lib/gitlab/database/migration_helpers/v2.rb index b5b8b58681c..ef48d601eb9 100644 --- a/lib/gitlab/database/migration_helpers/v2.rb +++ b/lib/gitlab/database/migration_helpers/v2.rb @@ -5,24 +5,6 @@ module Gitlab module MigrationHelpers module V2 include Gitlab::Database::MigrationHelpers - - # Superseded by `create_table` override below - def create_table_with_constraints(*_) - raise <<~EOM - #create_table_with_constraints is not supported anymore - use #create_table instead, for example: - - create_table :db_guides do |t| - t.bigint :stars, default: 0, null: false - t.text :title, limit: 128 - t.text :notes, limit: 1024 - - t.check_constraint 'stars > 1000', name: 'so_many_stars' - end - - See https://docs.gitlab.com/ee/development/database/strings_and_the_text_data_type.html - EOM - end - # Creates a new table, optionally allowing the caller to add text limit constraints to the table. # This method only extends Rails' `create_table` method # diff --git a/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers.rb b/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers.rb new file mode 100644 index 00000000000..01ff3dcbfb8 --- /dev/null +++ b/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module MigrationHelpers + module WraparoundVacuumHelpers + class WraparoundCheck + WraparoundError = Class.new(StandardError) + + def initialize(table_name, migration:) + @migration = migration + @table_name = table_name + + validate_table_existence! + end + + def execute + return if disabled? + return unless wraparound_vacuum.present? 
+ + log "Autovacuum with wraparound prevention mode is running on `#{table_name}`", title: true + log "This process prevents the migration from acquiring the necessary locks" + log "Query: `#{wraparound_vacuum[:query]}`" + log "Current duration: #{wraparound_vacuum[:duration].inspect}" + log "Process id: #{wraparound_vacuum[:pid]}" + log "You can wait until it completes or if absolutely necessary interrupt it using: " \ + "`select pg_cancel_backend(#{wraparound_vacuum[:pid]});`" + log "Be aware that a new process will kick in immediately, so multiple interruptions " \ + "might be required to time it right with the locks retry mechanism" + end + + private + + attr_reader :table_name + + delegate :say, :connection, to: :@migration + + def wraparound_vacuum + @wraparound_vacuum ||= transform_wraparound_vacuum + end + + def transform_wraparound_vacuum + result = raw_wraparound_vacuum + values = Array.wrap(result.cast_values.first) + + result.columns.zip(values).to_h.with_indifferent_access.compact + end + + def raw_wraparound_vacuum + connection.select_all(<<~SQL.squish) + SELECT pid, state, age(clock_timestamp(), query_start) as duration, query + FROM pg_stat_activity + WHERE query ILIKE '%VACUUM%' || #{quoted_table_name} || '%(to prevent wraparound)' + AND backend_type = 'autovacuum worker' + LIMIT 1 + SQL + end + + def validate_table_existence! + return if connection.table_exists?(table_name) + + raise WraparoundError, "Table #{table_name} does not exist" + end + + def quoted_table_name + connection.quote(table_name) + end + + def disabled? + return true unless wraparound_check_allowed? + + Gitlab::Utils.to_boolean(ENV['GITLAB_MIGRATIONS_DISABLE_WRAPAROUND_CHECK']) + end + + def wraparound_check_allowed? + Gitlab.com? || Gitlab.dev_or_test_env? 
+ end + + def log(text, title: false) + say text, !title + end + end + + def check_if_wraparound_in_progress(table_name) + WraparoundCheck.new(table_name, migration: self).execute + end + end + end + end +end diff --git a/lib/gitlab/database/migrations/pg_backend_pid.rb b/lib/gitlab/database/migrations/pg_backend_pid.rb new file mode 100644 index 00000000000..0c15aae9395 --- /dev/null +++ b/lib/gitlab/database/migrations/pg_backend_pid.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module Migrations + module PgBackendPid + module MigratorPgBackendPid + extend ::Gitlab::Utils::Override + + override :with_advisory_lock_connection + def with_advisory_lock_connection + super do |conn| + Gitlab::Database::Migrations::PgBackendPid.say(conn) + + yield(conn) + + Gitlab::Database::Migrations::PgBackendPid.say(conn) + end + end + end + + def self.patch! + ActiveRecord::Migrator.prepend(MigratorPgBackendPid) + end + + def self.say(conn) + pg_backend_pid = conn.select_value('SELECT pg_backend_pid()') + db_name = Gitlab::Database.db_config_name(conn) + + # rubocop:disable Rails/Output + puts "#{db_name}: == [advisory_lock_connection] " \ + "object_id: #{conn.object_id}, pg_backend_pid: #{pg_backend_pid}" + # rubocop:enable Rails/Output + end + end + end + end +end diff --git a/lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb b/lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb index 58447481e60..afca2368126 100644 --- a/lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb +++ b/lib/gitlab/database/partitioning/convert_table_to_first_list_partition.rb @@ -8,6 +8,8 @@ module Gitlab SQL_STATEMENT_SEPARATOR = ";\n\n" + PARTITIONING_CONSTRAINT_NAME = 'partitioning_constraint' + attr_reader :partitioning_column, :table_name, :parent_table_name, :zero_partition_value def initialize( @@ -23,10 +25,10 @@ module Gitlab @lock_tables = Array.wrap(lock_tables) end - def 
prepare_for_partitioning + def prepare_for_partitioning(async: false) assert_existing_constraints_partitionable - add_partitioning_check_constraint + add_partitioning_check_constraint(async: async) end def revert_preparation_for_partitioning @@ -36,6 +38,7 @@ module Gitlab def partition assert_existing_constraints_partitionable assert_partitioning_constraint_present + create_parent_table attach_foreign_keys_to_parent @@ -45,7 +48,9 @@ module Gitlab } migration_context.with_lock_retries(**lock_args) do - migration_context.execute(sql_to_convert_table) + redefine_loose_foreign_key_triggers do + migration_context.execute(sql_to_convert_table) + end end end @@ -118,16 +123,17 @@ module Gitlab constraints_on_column = Gitlab::Database::PostgresConstraint .by_table_identifier(table_identifier) .check_constraints - .valid .including_column(partitioning_column) - constraints_on_column.to_a.find do |constraint| - constraint.definition == "CHECK ((#{partitioning_column} = #{zero_partition_value}))" + check_body = "CHECK ((#{partitioning_column} = #{zero_partition_value}))" + + constraints_on_column.find do |constraint| + constraint.definition.start_with?(check_body) end end def assert_partitioning_constraint_present - return if partitioning_constraint + return if partitioning_constraint&.constraint_valid? raise UnableToPartition, <<~MSG Table #{table_name} is not ready for partitioning. @@ -135,14 +141,43 @@ module Gitlab MSG end - def add_partitioning_check_constraint - return if partitioning_constraint.present? + def add_partitioning_check_constraint(async: false) + return validate_partitioning_constraint_synchronously if partitioning_constraint.present? check_body = "#{partitioning_column} = #{connection.quote(zero_partition_value)}" # Any constraint name would work. 
The constraint is found based on its definition before partitioning - migration_context.add_check_constraint(table_name, check_body, 'partitioning_constraint') + migration_context.add_check_constraint( + table_name, check_body, PARTITIONING_CONSTRAINT_NAME, + validate: !async + ) + + if async + migration_context.prepare_async_check_constraint_validation( + table_name, name: PARTITIONING_CONSTRAINT_NAME + ) + end + + return if partitioning_constraint.present? - raise UnableToPartition, 'Error adding partitioning constraint' unless partitioning_constraint.present? + raise UnableToPartition, <<~MSG + Error adding partitioning constraint `#{PARTITIONING_CONSTRAINT_NAME}` for `#{table_name}` + MSG + end + + def validate_partitioning_constraint_synchronously + if partitioning_constraint.constraint_valid? + return Gitlab::AppLogger.info <<~MSG + Nothing to do, the partitioning constraint exists and is valid for `#{table_name}` + MSG + end + + # Async validations are executed only on .com, we need to validate synchronously for self-managed + migration_context.validate_check_constraint(table_name, partitioning_constraint.name) + return if partitioning_constraint.constraint_valid? + + raise UnableToPartition, <<~MSG + Error validating partitioning constraint `#{partitioning_constraint.name}` for `#{table_name}` + MSG end def create_parent_table @@ -262,6 +297,19 @@ module Gitlab iterations + aggressive_iterations end + + def redefine_loose_foreign_key_triggers + if migration_context.has_loose_foreign_key?(table_name) + migration_context.untrack_record_deletions(table_name) + + yield if block_given? + + migration_context.track_record_deletions(parent_table_name) + migration_context.track_record_deletions(table_name) + elsif block_given? 
+ yield + end + end end end end diff --git a/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb index dcf457b9d63..e87707953ae 100644 --- a/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb +++ b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb @@ -21,7 +21,7 @@ module Gitlab return end - bulk_copy = BulkCopy.new(source_table, partitioned_table, source_column, connection: connection) + bulk_copy = Gitlab::Database::PartitioningMigrationHelpers::BulkCopy.new(source_table, partitioned_table, source_column, connection: connection) parent_batch_relation = relation_scoped_to_range(source_table, source_column, start_id, stop_id) parent_batch_relation.each_batch(of: SUB_BATCH_SIZE) do |sub_batch| @@ -56,41 +56,6 @@ module Gitlab def mark_jobs_as_succeeded(*arguments) BackgroundMigrationJob.mark_all_as_succeeded(self.class.name, arguments) end - - # Helper class to copy data between two tables via upserts - class BulkCopy - DELIMITER = ', ' - - attr_reader :source_table, :destination_table, :source_column, :connection - - def initialize(source_table, destination_table, source_column, connection:) - @source_table = source_table - @destination_table = destination_table - @source_column = source_column - @connection = connection - end - - def copy_between(start_id, stop_id) - connection.execute(<<~SQL) - INSERT INTO #{destination_table} (#{column_listing}) - SELECT #{column_listing} - FROM #{source_table} - WHERE #{source_column} BETWEEN #{start_id} AND #{stop_id} - FOR UPDATE - ON CONFLICT (#{conflict_targets}) DO NOTHING - SQL - end - - private - - def column_listing - @column_listing ||= connection.columns(source_table).map(&:name).join(DELIMITER) - end - - def conflict_targets - connection.primary_key(destination_table).join(DELIMITER) - end - end end end end diff --git 
a/lib/gitlab/database/partitioning_migration_helpers/bulk_copy.rb b/lib/gitlab/database/partitioning_migration_helpers/bulk_copy.rb new file mode 100644 index 00000000000..b8f5a2e3ad4 --- /dev/null +++ b/lib/gitlab/database/partitioning_migration_helpers/bulk_copy.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module PartitioningMigrationHelpers + # Helper class to copy data between two tables via upserts + class BulkCopy + DELIMITER = ', ' + + attr_reader :source_table, :destination_table, :source_column, :connection + + def initialize(source_table, destination_table, source_column, connection:) + @source_table = source_table + @destination_table = destination_table + @source_column = source_column + @connection = connection + end + + def copy_between(start_id, stop_id) + connection.execute(<<~SQL) + INSERT INTO #{destination_table} (#{column_listing}) + SELECT #{column_listing} + FROM #{source_table} + WHERE #{source_column} BETWEEN #{start_id} AND #{stop_id} + FOR UPDATE + ON CONFLICT (#{conflict_targets}) DO NOTHING + SQL + end + + private + + def column_listing + @column_listing ||= connection.columns(source_table).map(&:name).join(DELIMITER) + end + + def conflict_targets + connection.primary_keys(destination_table).join(DELIMITER) + end + end + end + end +end diff --git a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb index f9790bf53b9..e3cf1298df6 100644 --- a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb +++ b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb @@ -6,13 +6,16 @@ module Gitlab module TableManagementHelpers include ::Gitlab::Database::SchemaHelpers include ::Gitlab::Database::MigrationHelpers + include ::Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers ALLOWED_TABLES = %w[audit_events web_hook_logs].freeze ERROR_SCOPE = 
'table partitioning' MIGRATION_CLASS_NAME = "::#{module_parent_name}::BackfillPartitionedTable" + MIGRATION = "BackfillPartitionedTable" BATCH_INTERVAL = 2.minutes.freeze BATCH_SIZE = 50_000 + SUB_BATCH_SIZE = 2_500 JobArguments = Struct.new(:start_id, :stop_id, :source_table_name, :partitioned_table_name, :source_column) do def self.from_array(arguments) @@ -107,7 +110,16 @@ module Gitlab partitioned_table_name = make_partitioned_table_name(table_name) primary_key = connection.primary_key(table_name) - enqueue_background_migration(table_name, partitioned_table_name, primary_key) + + queue_batched_background_migration( + MIGRATION, + table_name, + primary_key, + partitioned_table_name, + batch_size: BATCH_SIZE, + sub_batch_size: SUB_BATCH_SIZE, + job_interval: BATCH_INTERVAL + ) end # Cleanup a previously enqueued background migration to copy data into a partitioned table. This will not @@ -149,7 +161,7 @@ module Gitlab # 2. Inline copy any missed rows from the original table to the partitioned table # # **NOTE** Migrations using this method cannot be scheduled in the same release as the migration that - # schedules the background migration using the `enqueue_background_migration` helper, or else the + # schedules the background migration using the `enqueue_partitioning_data_migration` helper, or else the # background migration jobs will be force-executed. 
# # Example: @@ -251,7 +263,7 @@ module Gitlab create_sync_trigger(source_table_name, trigger_name, function_name) end - def prepare_constraint_for_list_partitioning(table_name:, partitioning_column:, parent_table_name:, initial_partitioning_value:) + def prepare_constraint_for_list_partitioning(table_name:, partitioning_column:, parent_table_name:, initial_partitioning_value:, async: false) validate_not_in_transaction!(:prepare_constraint_for_list_partitioning) Gitlab::Database::Partitioning::ConvertTableToFirstListPartition @@ -260,7 +272,7 @@ module Gitlab parent_table_name: parent_table_name, partitioning_column: partitioning_column, zero_partition_value: initial_partitioning_value - ).prepare_for_partitioning + ).prepare_for_partitioning(async: async) end def revert_preparing_constraint_for_list_partitioning(table_name:, partitioning_column:, parent_table_name:, initial_partitioning_value:) @@ -444,18 +456,6 @@ module Gitlab create_trigger(table_name, trigger_name, function_name, fires: 'AFTER INSERT OR UPDATE OR DELETE') end - def enqueue_background_migration(source_table_name, partitioned_table_name, source_column) - source_model = define_batchable_model(source_table_name) - - queue_background_migration_jobs_by_range_at_intervals( - source_model, - MIGRATION_CLASS_NAME, - BATCH_INTERVAL, - batch_size: BATCH_SIZE, - other_job_arguments: [source_table_name.to_s, partitioned_table_name, source_column], - track_jobs: true) - end - def cleanup_migration_jobs(table_name) ::Gitlab::Database::BackgroundMigrationJob.for_partitioning_migration(MIGRATION_CLASS_NAME, table_name).delete_all end diff --git a/lib/gitlab/database/postgres_foreign_key.rb b/lib/gitlab/database/postgres_foreign_key.rb index 28044b42f44..bb3e1d45f15 100644 --- a/lib/gitlab/database/postgres_foreign_key.rb +++ b/lib/gitlab/database/postgres_foreign_key.rb @@ -5,6 +5,8 @@ module Gitlab class PostgresForeignKey < SharedModel self.primary_key = :oid + has_many :child_foreign_keys, class_name: 
'Gitlab::Database::PostgresForeignKey', foreign_key: 'parent_oid' + # These values come from the possible confdeltype / confupdtype values in pg_constraint ACTION_TYPES = { restrict: 'r', diff --git a/lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb b/lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb index 713e1f772e3..50a3ad0d8ad 100644 --- a/lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb +++ b/lib/gitlab/database/query_analyzers/prevent_cross_database_modification.rb @@ -22,12 +22,27 @@ module Gitlab self.with_suppressed(false, &blk) end + # This method will temporary ignore the given tables in a current transaction + # This is meant to disable `PreventCrossDB` check for some well known failures + def self.temporary_ignore_tables_in_transaction(tables, url:, &blk) + return yield unless context&.dig(:ignored_tables) + + begin + prev_ignored_tables = context[:ignored_tables] + context[:ignored_tables] = prev_ignored_tables + tables + yield + ensure + context[:ignored_tables] = prev_ignored_tables + end + end + def self.begin! 
super context.merge!({ transaction_depth_by_db: Hash.new { |h, k| h[k] = 0 }, - modified_tables_by_db: Hash.new { |h, k| h[k] = Set.new } + modified_tables_by_db: Hash.new { |h, k| h[k] = Set.new }, + ignored_tables: [] }) end @@ -57,7 +72,7 @@ module Gitlab if context[:transaction_depth_by_db][database] == 0 context[:modified_tables_by_db][database].clear - # Attempt to troubleshoot https://gitlab.com/gitlab-org/gitlab/-/issues/351531 + # Attempt to troubleshoot https://gitlab.com/gitlab-org/gitlab/-/issues/351531 ::CrossDatabaseModification::TransactionStackTrackRecord.log_gitlab_transactions_stack(action: :end_of_transaction) elsif context[:transaction_depth_by_db][database] < 0 context[:transaction_depth_by_db][database] = 0 @@ -79,6 +94,9 @@ module Gitlab # https://gitlab.com/gitlab-org/gitlab/-/issues/343394 tables -= %w[plans gitlab_subscriptions] + # Ignore some tables + tables -= context[:ignored_tables].to_a + return if tables.empty? # All migrations will write to schema_migrations in the same transaction. 
diff --git a/lib/gitlab/database/schema_helpers.rb b/lib/gitlab/database/schema_helpers.rb index d81ff4ff1ae..3ae696a71d8 100644 --- a/lib/gitlab/database/schema_helpers.rb +++ b/lib/gitlab/database/schema_helpers.rb @@ -31,8 +31,8 @@ module Gitlab end def trigger_exists?(table_name, name) - connection.select_value(<<~SQL) - SELECT 1 + result = connection.select_value(<<~SQL.squish) + SELECT true FROM pg_catalog.pg_trigger trgr INNER JOIN pg_catalog.pg_class rel ON trgr.tgrelid = rel.oid @@ -42,6 +42,8 @@ module Gitlab AND rel.relname = #{connection.quote(table_name)} AND trgr.tgname = #{connection.quote(name)} SQL + + !!result end def drop_function(name, if_exists: true) diff --git a/lib/gitlab/database/schema_validation/adapters/column_database_adapter.rb b/lib/gitlab/database/schema_validation/adapters/column_database_adapter.rb new file mode 100644 index 00000000000..10603b3dbad --- /dev/null +++ b/lib/gitlab/database/schema_validation/adapters/column_database_adapter.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module Adapters + class ColumnDatabaseAdapter + def initialize(query_result) + @query_result = query_result + end + + def name + @name ||= query_result['column_name'] + end + + def table_name + query_result['table_name'] + end + + def data_type + query_result['data_type'] + end + + def default + return unless query_result['column_default'] + + return if name == 'id' || query_result['column_default'].include?('nextval') + + "DEFAULT #{query_result['column_default']}" + end + + def nullable + 'NOT NULL' if query_result['not_null'] + end + + private + + attr_reader :query_result + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter.rb b/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter.rb new file mode 100644 index 00000000000..30a13b5dff1 --- /dev/null +++ 
b/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter.rb @@ -0,0 +1,114 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module Adapters + UndefinedPGType = Class.new(StandardError) + + class ColumnStructureSqlAdapter + NOT_NULL_CONSTR = :CONSTR_NOTNULL + DEFAULT_CONSTR = :CONSTR_DEFAULT + + MAPPINGS = { + 't' => 'true', + 'f' => 'false' + }.freeze + + attr_reader :table_name + + def initialize(table_name, pg_query_stmt) + @table_name = table_name + @pg_query_stmt = pg_query_stmt + end + + def name + @name ||= pg_query_stmt.colname + end + + def data_type + type(pg_query_stmt.type_name) + end + + def default + return if name == 'id' + + value = parse_node(constraints.find { |node| node.constraint.contype == DEFAULT_CONSTR }) + + return unless value + + "DEFAULT #{value}" + end + + def nullable + 'NOT NULL' if constraints.any? { |node| node.constraint.contype == NOT_NULL_CONSTR } + end + + private + + attr_reader :pg_query_stmt + + def constraints + @constraints ||= pg_query_stmt.constraints + end + + # Returns the node type + # + # pg_type:: type alias, used internally by postgres, +int4+, +int8+, +bool+, +varchar+ + # type:: type name, like +integer+, +bigint+, +boolean+, +character varying+. + # array_ext:: adds the +[]+ extension for array types. + # precision_ext:: adds the precision, if have any, like +(255)+, +(6)+. + # + # @info +timestamp+ and +timestamptz+ have a particular case when precision is defined. + # In this case, the order of the statement needs to be re-arranged from + # timestamp without time zone(6) to timestamp(6) without a time zone. + def type(node) + pg_type = parse_node(node.names.last) + type = PgTypes::TYPES.fetch(pg_type).dup + array_ext = '[]' if node.array_bounds.any? + precision_ext = "(#{node.typmods.map { |typmod| parse_node(typmod) }.join(',')})" if node.typmods.any? 
+ + if %w[timestamp timestamptz].include?(pg_type) + type.gsub!('timestamp', ['timestamp', precision_ext].compact.join('')) + precision_ext = nil + end + + [type, precision_ext, array_ext].compact.join('') + rescue KeyError => exception + raise UndefinedPGType, exception.message + end + + # Parses PGQuery nodes recursively + # + # :constraint:: nodes that groups column default info + # :func_cal:: nodes that stores functions, like +now()+ + # :a_const:: nodes that stores constant values, like +t+, +f+, +0.0.0.0+, +255+, +1.0+ + # :type_cast:: nodes that stores casting values, like +'name'::text+, +'0.0.0.0'::inet+ + # else:: extract node values in the last iteration of the recursion, like +int4+, +1.0+, +now+, +255+ + # + # @note boolean types types are mapped from +t+, +f+ to +true+, +false+ + def parse_node(node) + return unless node + + case node.node + when :constraint + parse_node(node.constraint.raw_expr) + when :func_call + "#{parse_node(node.func_call.funcname.first)}()" + when :a_const + parse_node(node.a_const.val) + when :type_cast + value = parse_node(node.type_cast.arg) + type = type(node.type_cast.type_name) + separator = MAPPINGS.key?(value) ? '' : "::#{type}" + + [MAPPINGS.fetch(value, "'#{value}'"), separator].compact.join('') + else + node.to_h[node.node].values.last + end + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/database.rb b/lib/gitlab/database/schema_validation/database.rb index 07bd02e58e1..9ff4a843e6d 100644 --- a/lib/gitlab/database/schema_validation/database.rb +++ b/lib/gitlab/database/schema_validation/database.rb @@ -18,6 +18,10 @@ module Gitlab trigger_map[trigger_name] end + def fetch_table_by_name(table_name) + table_map[table_name] + end + def index_exists?(index_name) index_map[index_name].present? end @@ -26,6 +30,10 @@ module Gitlab trigger_map[trigger_name].present? end + def table_exists?(table_name) + fetch_table_by_name(table_name).present? 
+ end + def indexes index_map.values end @@ -34,6 +42,10 @@ module Gitlab trigger_map.values end + def tables + table_map.values + end + private attr_reader :connection @@ -56,6 +68,14 @@ module Gitlab end end + def table_map + @table_map ||= fetch_tables.transform_values! do |stmt| + columns = stmt.map { |column| SchemaObjects::Column.new(Adapters::ColumnDatabaseAdapter.new(column)) } + + SchemaObjects::Table.new(stmt.first['table_name'], columns) + end + end + def fetch_indexes sql = <<~SQL SELECT indexname, indexdef @@ -78,6 +98,28 @@ module Gitlab connection.select_rows(sql, nil, schemas).to_h end + + def fetch_tables + sql = <<~SQL + SELECT + table_information.relname AS table_name, + col_information.attname AS column_name, + col_information.attnotnull AS not_null, + format_type(col_information.atttypid, col_information.atttypmod) AS data_type, + pg_get_expr(col_default_information.adbin, col_default_information.adrelid) AS column_default + FROM pg_attribute AS col_information + JOIN pg_class AS table_information ON col_information.attrelid = table_information.oid + JOIN pg_namespace AS schema_information ON table_information.relnamespace = schema_information.oid + LEFT JOIN pg_attrdef AS col_default_information ON col_information.attrelid = col_default_information.adrelid + AND col_information.attnum = col_default_information.adnum + WHERE NOT col_information.attisdropped + AND col_information.attnum > 0 + AND table_information.relkind IN ('r', 'p') + AND schema_information.nspname IN ($1, $2) + SQL + + connection.exec_query(sql, nil, schemas).group_by { |row| row['table_name'] } + end end end end diff --git a/lib/gitlab/database/schema_validation/inconsistency.rb b/lib/gitlab/database/schema_validation/inconsistency.rb new file mode 100644 index 00000000000..c834a6bd693 --- /dev/null +++ b/lib/gitlab/database/schema_validation/inconsistency.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + class 
Inconsistency + def initialize(validator_class, structure_sql_object, database_object) + @validator_class = validator_class + @structure_sql_object = structure_sql_object + @database_object = database_object + end + + def error_message + format(validator_class::ERROR_MESSAGE, object_name) + end + + def type + validator_class.name.demodulize.underscore + end + + def table_name + structure_sql_object&.table_name || database_object&.table_name + end + + def object_name + structure_sql_object&.name || database_object&.name + end + + def diff + Diffy::Diff.new(structure_sql_statement, database_statement) + end + + def inspect + <<~MSG + #{'-' * 54} + #{error_message} + Diff: + #{diff.to_s(:color)} + #{'-' * 54} + MSG + end + + private + + attr_reader :validator_class, :structure_sql_object, :database_object + + def structure_sql_statement + return unless structure_sql_object + + "#{structure_sql_object.statement}\n" + end + + def database_statement + return unless database_object + + "#{database_object.statement}\n" + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/pg_types.rb b/lib/gitlab/database/schema_validation/pg_types.rb new file mode 100644 index 00000000000..0a1999d056e --- /dev/null +++ b/lib/gitlab/database/schema_validation/pg_types.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + class PgTypes + TYPES = { + 'bool' => 'boolean', + 'bytea' => 'bytea', + 'char' => '"char"', + 'int8' => 'bigint', + 'int2' => 'smallint', + 'int4' => 'integer', + 'regproc' => 'regproc', + 'text' => 'text', + 'oid' => 'oid', + 'tid' => 'tid', + 'xid' => 'xid', + 'cid' => 'cid', + 'json' => 'json', + 'xml' => 'xml', + 'pg_node_tree' => 'pg_node_tree', + 'pg_ndistinct' => 'pg_ndistinct', + 'pg_dependencies' => 'pg_dependencies', + 'pg_mcv_list' => 'pg_mcv_list', + 'xid8' => 'xid8', + 'path' => 'path', + 'polygon' => 'polygon', + 'float4' => 'real', + 'float8' => 'double precision', + 
'circle' => 'circle', + 'money' => 'money', + 'macaddr' => 'macaddr', + 'inet' => 'inet', + 'cidr' => 'cidr', + 'macaddr8' => 'macaddr8', + 'aclitem' => 'aclitem', + 'bpchar' => 'character', + 'varchar' => 'character varying', + 'date' => 'date', + 'time' => 'time without time zone', + 'timestamp' => 'timestamp without time zone', + 'timestamptz' => 'timestamp with time zone', + 'interval' => 'interval', + 'timetz' => 'time with time zone', + 'bit' => 'bit', + 'varbit' => 'bit varying', + 'numeric' => 'numeric', + 'refcursor' => 'refcursor', + 'regprocedure' => 'regprocedure', + 'regoper' => 'regoper', + 'regoperator' => 'regoperator', + 'regclass' => 'regclass', + 'regcollation' => 'regcollation', + 'regtype' => 'regtype', + 'regrole' => 'regrole', + 'regnamespace' => 'regnamespace', + 'uuid' => 'uuid', + 'pg_lsn' => 'pg_lsn', + 'tsvector' => 'tsvector', + 'gtsvector' => 'gtsvector', + 'tsquery' => 'tsquery', + 'regconfig' => 'regconfig', + 'regdictionary' => 'regdictionary', + 'jsonb' => 'jsonb', + 'jsonpath' => 'jsonpath', + 'txid_snapshot' => 'txid_snapshot', + 'pg_snapshot' => 'pg_snapshot' + }.freeze + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/schema_inconsistency.rb b/lib/gitlab/database/schema_validation/schema_inconsistency.rb new file mode 100644 index 00000000000..6f50603e784 --- /dev/null +++ b/lib/gitlab/database/schema_validation/schema_inconsistency.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + class SchemaInconsistency < ApplicationRecord + self.table_name = :schema_inconsistencies + + belongs_to :issue + + validates :object_name, :valitador_name, :table_name, presence: true + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/schema_objects/base.rb b/lib/gitlab/database/schema_validation/schema_objects/base.rb index b0c8eb087dd..43d30dc54ae 100644 --- a/lib/gitlab/database/schema_validation/schema_objects/base.rb +++ 
b/lib/gitlab/database/schema_validation/schema_objects/base.rb @@ -13,6 +13,10 @@ module Gitlab raise NoMethodError, "subclasses of #{self.class.name} must implement #{__method__}" end + def table_name + parsed_stmt.relation.relname + end + def statement @statement ||= PgQuery.deparse_stmt(parsed_stmt) end diff --git a/lib/gitlab/database/schema_validation/schema_objects/column.rb b/lib/gitlab/database/schema_validation/schema_objects/column.rb new file mode 100644 index 00000000000..38ad8e309a3 --- /dev/null +++ b/lib/gitlab/database/schema_validation/schema_objects/column.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module SchemaObjects + class Column + def initialize(adapter) + @adapter = adapter + end + + attr_reader :adapter + + delegate :name, :table_name, to: :adapter + + def statement + [name, adapter.data_type, adapter.default, adapter.nullable].compact.join(' ') + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/schema_objects/table.rb b/lib/gitlab/database/schema_validation/schema_objects/table.rb new file mode 100644 index 00000000000..6f573e7027f --- /dev/null +++ b/lib/gitlab/database/schema_validation/schema_objects/table.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module SchemaObjects + class Table + def initialize(name, columns) + @name = name + @columns = columns + end + + attr_reader :name, :columns + + def table_name + name + end + + def statement + format('CREATE TABLE %s (%s)', name, columns_statement) + end + + def fetch_column_by_name(column_name) + columns.find { |column| column.name == column_name } + end + + def column_exists?(column_name) + fetch_column_by_name(column_name).present? 
+ end + + private + + def columns_statement + columns.map(&:statement).join(', ') + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/structure_sql.rb b/lib/gitlab/database/schema_validation/structure_sql.rb index cb62af8d8b8..e93c33aedcd 100644 --- a/lib/gitlab/database/schema_validation/structure_sql.rb +++ b/lib/gitlab/database/schema_validation/structure_sql.rb @@ -19,6 +19,14 @@ module Gitlab triggers.find { |trigger| trigger.name == trigger_name }.present? end + def fetch_table_by_name(table_name) + tables.find { |table| table.name == table_name } + end + + def table_exists?(table_name) + fetch_table_by_name(table_name).present? + end + def indexes @indexes ||= map_with_default_schema(index_statements, SchemaObjects::Index) end @@ -27,6 +35,18 @@ module Gitlab @triggers ||= map_with_default_schema(trigger_statements, SchemaObjects::Trigger) end + def tables + @tables ||= table_statements.map do |stmt| + table_name = stmt.relation.relname + + columns = stmt.table_elts.select { |n| n.node == :column_def }.map do |column| + SchemaObjects::Column.new(Adapters::ColumnStructureSqlAdapter.new(table_name, column.column_def)) + end + + SchemaObjects::Table.new(table_name, columns) + end + end + private attr_reader :structure_file_path, :schema_name @@ -39,6 +59,10 @@ module Gitlab statements.filter_map { |s| s.stmt.create_trig_stmt } end + def table_statements + statements.filter_map { |s| s.stmt.create_stmt } + end + def statements @statements ||= parsed_structure_file.tree.stmts end diff --git a/lib/gitlab/database/schema_validation/track_inconsistency.rb b/lib/gitlab/database/schema_validation/track_inconsistency.rb new file mode 100644 index 00000000000..c7e946be647 --- /dev/null +++ b/lib/gitlab/database/schema_validation/track_inconsistency.rb @@ -0,0 +1,77 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + class TrackInconsistency + def initialize(inconsistency, project, user) + 
@inconsistency = inconsistency + @project = project + @user = user + end + + def execute + return unless Gitlab.com? + return if inconsistency_record.present? + + result = ::Issues::CreateService.new(container: project, current_user: user, params: params, + spam_params: nil).execute + + track_inconsistency(result[:issue]) if result.success? + end + + private + + attr_reader :inconsistency, :project, :user + + def track_inconsistency(issue) + schema_inconsistency_model.create( + issue: issue, + object_name: inconsistency.object_name, + table_name: inconsistency.table_name, + valitador_name: inconsistency.type + ) + end + + def params + { + title: issue_title, + description: issue_description, + confidential: true, + issue_type: 'issue', + labels: %w[database database-inconsistency-report] + } + end + + def issue_title + "New schema inconsistency: #{inconsistency.object_name}" + end + + def issue_description + <<~MSG + We have detected a new schema inconsistency. + + Table_name: #{inconsistency.table_name} + Object_name: #{inconsistency.object_name} + Validator_name: #{inconsistency.type} + Error_message: #{inconsistency.error_message} + + For more information, please contact the database team. 
+ MSG + end + + def schema_inconsistency_model + Gitlab::Database::SchemaValidation::SchemaInconsistency + end + + def inconsistency_record + schema_inconsistency_model.find_by( + object_name: inconsistency.object_name, + table_name: inconsistency.table_name, + valitador_name: inconsistency.type + ) + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/validators/base_validator.rb b/lib/gitlab/database/schema_validation/validators/base_validator.rb index 14995b5f378..58e0bf5292b 100644 --- a/lib/gitlab/database/schema_validation/validators/base_validator.rb +++ b/lib/gitlab/database/schema_validation/validators/base_validator.rb @@ -5,7 +5,7 @@ module Gitlab module SchemaValidation module Validators class BaseValidator - Inconsistency = Struct.new(:type, :object_name, :statement) + ERROR_MESSAGE = 'A schema inconsistency has been found' def initialize(structure_sql, database) @structure_sql = structure_sql @@ -14,10 +14,15 @@ module Gitlab def self.all_validators [ + ExtraTables, + ExtraTableColumns, ExtraIndexes, ExtraTriggers, + MissingTables, + MissingTableColumns, MissingIndexes, MissingTriggers, + DifferentDefinitionTables, DifferentDefinitionIndexes, DifferentDefinitionTriggers ] @@ -31,10 +36,8 @@ module Gitlab attr_reader :structure_sql, :database - def build_inconsistency(validator_class, schema_object) - inconsistency_type = validator_class.name.demodulize.underscore - - Inconsistency.new(inconsistency_type, schema_object.name, schema_object.statement) + def build_inconsistency(validator_class, structure_sql_object, database_object) + Inconsistency.new(validator_class, structure_sql_object, database_object) end end end diff --git a/lib/gitlab/database/schema_validation/validators/different_definition_indexes.rb b/lib/gitlab/database/schema_validation/validators/different_definition_indexes.rb index d54b62ac1e7..ba12b3cdc61 100644 --- a/lib/gitlab/database/schema_validation/validators/different_definition_indexes.rb +++ 
b/lib/gitlab/database/schema_validation/validators/different_definition_indexes.rb @@ -5,6 +5,8 @@ module Gitlab module SchemaValidation module Validators class DifferentDefinitionIndexes < BaseValidator + ERROR_MESSAGE = "The %s index has a different statement between structure.sql and database" + def execute structure_sql.indexes.filter_map do |structure_sql_index| database_index = database.fetch_index_by_name(structure_sql_index.name) @@ -12,7 +14,7 @@ module Gitlab next if database_index.nil? next if database_index.statement == structure_sql_index.statement - build_inconsistency(self.class, structure_sql_index) + build_inconsistency(self.class, structure_sql_index, database_index) end end end diff --git a/lib/gitlab/database/schema_validation/validators/different_definition_tables.rb b/lib/gitlab/database/schema_validation/validators/different_definition_tables.rb new file mode 100644 index 00000000000..9fbddbd3fcd --- /dev/null +++ b/lib/gitlab/database/schema_validation/validators/different_definition_tables.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module Validators + class DifferentDefinitionTables < BaseValidator + ERROR_MESSAGE = "The table %s has a different column statement between structure.sql and database" + + def execute + structure_sql.tables.filter_map do |structure_sql_table| + table_name = structure_sql_table.name + database_table = database.fetch_table_by_name(table_name) + + next unless database_table + + db_diffs, structure_diffs = column_diffs(database_table, structure_sql_table.columns) + + if db_diffs.any? 
+ build_inconsistency(self.class, + SchemaObjects::Table.new(table_name, db_diffs), + SchemaObjects::Table.new(table_name, structure_diffs)) + end + end + end + + private + + def column_diffs(db_table, columns) + db_diffs = [] + structure_diffs = [] + + columns.each do |column| + db_column = db_table.fetch_column_by_name(column.name) + + next unless db_column + + next if db_column.statement == column.statement + + db_diffs << db_column + structure_diffs << column + end + + [db_diffs, structure_diffs] + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/validators/different_definition_triggers.rb b/lib/gitlab/database/schema_validation/validators/different_definition_triggers.rb index efb87a70ca8..79ffe9a6a98 100644 --- a/lib/gitlab/database/schema_validation/validators/different_definition_triggers.rb +++ b/lib/gitlab/database/schema_validation/validators/different_definition_triggers.rb @@ -5,6 +5,8 @@ module Gitlab module SchemaValidation module Validators class DifferentDefinitionTriggers < BaseValidator + ERROR_MESSAGE = "The %s trigger has a different statement between structure.sql and database" + def execute structure_sql.triggers.filter_map do |structure_sql_trigger| database_trigger = database.fetch_trigger_by_name(structure_sql_trigger.name) @@ -12,7 +14,7 @@ module Gitlab next if database_trigger.nil? 
next if database_trigger.statement == structure_sql_trigger.statement - build_inconsistency(self.class, structure_sql_trigger) + build_inconsistency(self.class, structure_sql_trigger, nil) end end end diff --git a/lib/gitlab/database/schema_validation/validators/extra_indexes.rb b/lib/gitlab/database/schema_validation/validators/extra_indexes.rb index 28384dd7cee..c8d3749894b 100644 --- a/lib/gitlab/database/schema_validation/validators/extra_indexes.rb +++ b/lib/gitlab/database/schema_validation/validators/extra_indexes.rb @@ -5,11 +5,13 @@ module Gitlab module SchemaValidation module Validators class ExtraIndexes < BaseValidator + ERROR_MESSAGE = "The index %s is present in the database, but not in the structure.sql file" + def execute - database.indexes.filter_map do |index| - next if structure_sql.index_exists?(index.name) + database.indexes.filter_map do |database_index| + next if structure_sql.index_exists?(database_index.name) - build_inconsistency(self.class, index) + build_inconsistency(self.class, nil, database_index) end end end diff --git a/lib/gitlab/database/schema_validation/validators/extra_table_columns.rb b/lib/gitlab/database/schema_validation/validators/extra_table_columns.rb new file mode 100644 index 00000000000..823b01cf808 --- /dev/null +++ b/lib/gitlab/database/schema_validation/validators/extra_table_columns.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module Validators + class ExtraTableColumns < BaseValidator + ERROR_MESSAGE = "The table %s has columns present in the database, but not in the structure.sql file" + + def execute + database.tables.filter_map do |database_table| + table_name = database_table.name + structure_sql_table = structure_sql.fetch_table_by_name(table_name) + + next unless structure_sql_table + + inconsistencies = database_table.columns.filter_map do |database_table_column| + next if structure_sql_table.column_exists?(database_table_column.name) + + 
database_table_column + end + + if inconsistencies.any? + build_inconsistency(self.class, nil, SchemaObjects::Table.new(table_name, inconsistencies)) + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/validators/extra_tables.rb b/lib/gitlab/database/schema_validation/validators/extra_tables.rb new file mode 100644 index 00000000000..99e98eb8f67 --- /dev/null +++ b/lib/gitlab/database/schema_validation/validators/extra_tables.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module Validators + class ExtraTables < BaseValidator + ERROR_MESSAGE = "The table %s is present in the database, but not in the structure.sql file" + + def execute + database.tables.filter_map do |database_table| + next if structure_sql.table_exists?(database_table.name) + + build_inconsistency(self.class, nil, database_table) + end + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/validators/extra_triggers.rb b/lib/gitlab/database/schema_validation/validators/extra_triggers.rb index f03bb49526c..37dcbc53e2e 100644 --- a/lib/gitlab/database/schema_validation/validators/extra_triggers.rb +++ b/lib/gitlab/database/schema_validation/validators/extra_triggers.rb @@ -5,11 +5,13 @@ module Gitlab module SchemaValidation module Validators class ExtraTriggers < BaseValidator + ERROR_MESSAGE = "The trigger %s is present in the database, but not in the structure.sql file" + def execute - database.triggers.filter_map do |trigger| - next if structure_sql.trigger_exists?(trigger.name) + database.triggers.filter_map do |database_trigger| + next if structure_sql.trigger_exists?(database_trigger.name) - build_inconsistency(self.class, trigger) + build_inconsistency(self.class, nil, database_trigger) end end end diff --git a/lib/gitlab/database/schema_validation/validators/missing_indexes.rb b/lib/gitlab/database/schema_validation/validators/missing_indexes.rb index 
ac0ea0152ba..7f81aaccf0f 100644 --- a/lib/gitlab/database/schema_validation/validators/missing_indexes.rb +++ b/lib/gitlab/database/schema_validation/validators/missing_indexes.rb @@ -5,11 +5,13 @@ module Gitlab module SchemaValidation module Validators class MissingIndexes < BaseValidator + ERROR_MESSAGE = "The index %s is missing from the database" + def execute - structure_sql.indexes.filter_map do |index| - next if database.index_exists?(index.name) + structure_sql.indexes.filter_map do |structure_sql_index| + next if database.index_exists?(structure_sql_index.name) - build_inconsistency(self.class, index) + build_inconsistency(self.class, structure_sql_index, nil) end end end diff --git a/lib/gitlab/database/schema_validation/validators/missing_table_columns.rb b/lib/gitlab/database/schema_validation/validators/missing_table_columns.rb new file mode 100644 index 00000000000..b49d53823ee --- /dev/null +++ b/lib/gitlab/database/schema_validation/validators/missing_table_columns.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module Validators + class MissingTableColumns < BaseValidator + ERROR_MESSAGE = "The table %s has columns missing from the database" + + def execute + structure_sql.tables.filter_map do |structure_sql_table| + table_name = structure_sql_table.name + database_table = database.fetch_table_by_name(table_name) + + next unless database_table + + inconsistencies = structure_sql_table.columns.filter_map do |structure_table_column| + next if database_table.column_exists?(structure_table_column.name) + + structure_table_column + end + + if inconsistencies.any? 
+ build_inconsistency(self.class, nil, SchemaObjects::Table.new(table_name, inconsistencies)) + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/validators/missing_tables.rb b/lib/gitlab/database/schema_validation/validators/missing_tables.rb new file mode 100644 index 00000000000..f1c9383487d --- /dev/null +++ b/lib/gitlab/database/schema_validation/validators/missing_tables.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module SchemaValidation + module Validators + class MissingTables < BaseValidator + ERROR_MESSAGE = "The table %s is missing from the database" + + def execute + structure_sql.tables.filter_map do |structure_sql_table| + next if database.table_exists?(structure_sql_table.name) + + build_inconsistency(self.class, structure_sql_table, nil) + end + end + end + end + end + end +end diff --git a/lib/gitlab/database/schema_validation/validators/missing_triggers.rb b/lib/gitlab/database/schema_validation/validators/missing_triggers.rb index c7137c68c1c..36236463bbf 100644 --- a/lib/gitlab/database/schema_validation/validators/missing_triggers.rb +++ b/lib/gitlab/database/schema_validation/validators/missing_triggers.rb @@ -5,11 +5,13 @@ module Gitlab module SchemaValidation module Validators class MissingTriggers < BaseValidator + ERROR_MESSAGE = "The trigger %s is missing from the database" + def execute - structure_sql.triggers.filter_map do |index| - next if database.trigger_exists?(index.name) + structure_sql.triggers.filter_map do |structure_sql_trigger| + next if database.trigger_exists?(structure_sql_trigger.name) - build_inconsistency(self.class, index) + build_inconsistency(self.class, structure_sql_trigger, nil) end end end diff --git a/lib/gitlab/database/tables_locker.rb b/lib/gitlab/database/tables_locker.rb index 42a2c5c02f7..1b6ab3fb24b 100644 --- a/lib/gitlab/database/tables_locker.rb +++ b/lib/gitlab/database/tables_locker.rb @@ -8,6 +8,7 @@ module 
Gitlab def initialize(logger: nil, dry_run: false) @logger = logger @dry_run = dry_run + @result = [] end def unlock_writes @@ -19,6 +20,8 @@ module Gitlab unlock_writes_on_table(table_name, connection, database_name) end end + + @result end # It locks the tables on the database where they don't belong. Also it unlocks the tables @@ -38,25 +41,27 @@ module Gitlab end end end + + @result end private # Unlocks the writes on the table and its partitions def unlock_writes_on_table(table_name, connection, database_name) - lock_writes_manager(table_name, connection, database_name).unlock_writes + @result << lock_writes_manager(table_name, connection, database_name).unlock_writes table_attached_partitions(table_name, connection) do |postgres_partition| - lock_writes_manager(postgres_partition.identifier, connection, database_name).unlock_writes + @result << lock_writes_manager(postgres_partition.identifier, connection, database_name).unlock_writes end end # It locks the writes on the table and its partitions def lock_writes_on_table(table_name, connection, database_name) - lock_writes_manager(table_name, connection, database_name).lock_writes + @result << lock_writes_manager(table_name, connection, database_name).lock_writes table_attached_partitions(table_name, connection) do |postgres_partition| - lock_writes_manager(postgres_partition.identifier, connection, database_name).lock_writes + @result << lock_writes_manager(postgres_partition.identifier, connection, database_name).lock_writes end end diff --git a/lib/gitlab/database_importers/work_items/base_type_importer.rb b/lib/gitlab/database_importers/work_items/base_type_importer.rb index 85ac816f712..9e5d43f1767 100644 --- a/lib/gitlab/database_importers/work_items/base_type_importer.rb +++ b/lib/gitlab/database_importers/work_items/base_type_importer.rb @@ -19,7 +19,9 @@ module Gitlab status: 'Status', requirement_legacy: 'Requirement legacy', test_reports: 'Test reports', - notifications: 'Notifications' + 
notifications: 'Notifications', + current_user_todos: "Current user todos", + award_emoji: 'Award emoji' }.freeze WIDGETS_FOR_TYPE = { @@ -34,18 +36,24 @@ module Gitlab :iteration, :weight, :health_status, - :notifications + :notifications, + :current_user_todos, + :award_emoji ], incident: [ :description, :hierarchy, :notes, - :notifications + :notifications, + :current_user_todos, + :award_emoji ], test_case: [ :description, :notes, - :notifications + :notifications, + :current_user_todos, + :award_emoji ], requirement: [ :description, @@ -53,7 +61,9 @@ module Gitlab :status, :requirement_legacy, :test_reports, - :notifications + :notifications, + :current_user_todos, + :award_emoji ], task: [ :assignees, @@ -65,7 +75,9 @@ module Gitlab :notes, :iteration, :weight, - :notifications + :notifications, + :current_user_todos, + :award_emoji ], objective: [ :assignees, @@ -76,7 +88,9 @@ module Gitlab :notes, :health_status, :progress, - :notifications + :notifications, + :current_user_todos, + :award_emoji ], key_result: [ :assignees, @@ -87,7 +101,9 @@ module Gitlab :notes, :health_status, :progress, - :notifications + :notifications, + :current_user_todos, + :award_emoji ] }.freeze diff --git a/lib/gitlab/diff/highlight.rb b/lib/gitlab/diff/highlight.rb index 225b4f7cf86..95ea3fe9f0f 100644 --- a/lib/gitlab/diff/highlight.rb +++ b/lib/gitlab/diff/highlight.rb @@ -24,15 +24,15 @@ module Gitlab end def highlight - populate_marker_ranges if Feature.enabled?(:use_marker_ranges, project) + populate_marker_ranges - @diff_lines.map.with_index do |diff_line, index| + @diff_lines.map do |diff_line| diff_line = diff_line.dup # ignore highlighting for "match" lines next diff_line if diff_line.meta? 
rich_line = apply_syntax_highlight(diff_line) - rich_line = apply_marker_ranges_highlight(diff_line, rich_line, index) + rich_line = apply_marker_ranges_highlight(diff_line, rich_line) diff_line.rich_text = rich_line @@ -60,12 +60,8 @@ module Gitlab highlight_line(diff_line) || ERB::Util.html_escape(diff_line.text) end - def apply_marker_ranges_highlight(diff_line, rich_line, index) - marker_ranges = if Feature.enabled?(:use_marker_ranges, project) - diff_line.marker_ranges - else - inline_diffs[index] - end + def apply_marker_ranges_highlight(diff_line, rich_line) + marker_ranges = diff_line.marker_ranges return rich_line if marker_ranges.blank? @@ -134,12 +130,6 @@ module Gitlab end end - # Deprecated: https://gitlab.com/gitlab-org/gitlab/-/issues/324638 - # ------------------------------------------------------------------------ - def inline_diffs - @inline_diffs ||= InlineDiff.for_lines(@raw_lines) - end - def old_lines @old_lines ||= highlighted_blob_lines(diff_file.old_blob) end diff --git a/lib/gitlab/diff/highlight_cache.rb b/lib/gitlab/diff/highlight_cache.rb index 5128b09aef4..63a437b021d 100644 --- a/lib/gitlab/diff/highlight_cache.rb +++ b/lib/gitlab/diff/highlight_cache.rb @@ -71,7 +71,6 @@ module Gitlab strong_memoize(:redis_key) do options = [ diff_options, - Feature.enabled?(:use_marker_ranges, diffable.project), Feature.enabled?(:diff_line_syntax_highlighting, diffable.project) ] options_for_key = OpenSSL::Digest::SHA256.hexdigest(options.join) diff --git a/lib/gitlab/diff/inline_diff.rb b/lib/gitlab/diff/inline_diff.rb index 802da50cfc6..7f760a23f45 100644 --- a/lib/gitlab/diff/inline_diff.rb +++ b/lib/gitlab/diff/inline_diff.rb @@ -17,27 +17,6 @@ module Gitlab CharDiff.new(old_line, new_line).changed_ranges(offset: offset) end - - # Deprecated: https://gitlab.com/gitlab-org/gitlab/-/issues/324638 - class << self - def for_lines(lines) - pair_selector = Gitlab::Diff::PairSelector.new(lines) - - inline_diffs = [] - - pair_selector.each do 
|old_index, new_index| - old_line = lines[old_index] - new_line = lines[new_index] - - old_diffs, new_diffs = new(old_line, new_line, offset: 1).inline_diffs - - inline_diffs[old_index] = old_diffs - inline_diffs[new_index] = new_diffs - end - - inline_diffs - end - end end end end diff --git a/lib/gitlab/email/hook/silent_mode_interceptor.rb b/lib/gitlab/email/hook/silent_mode_interceptor.rb new file mode 100644 index 00000000000..56f94119472 --- /dev/null +++ b/lib/gitlab/email/hook/silent_mode_interceptor.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module Gitlab + module Email + module Hook + class SilentModeInterceptor + def self.delivering_email(message) + if Gitlab::CurrentSettings.silent_mode_enabled? + message.perform_deliveries = false + + Gitlab::AppJsonLogger.info( + message: "SilentModeInterceptor prevented sending mail", + mail_subject: message.subject, + silent_mode_enabled: true + ) + else + Gitlab::AppJsonLogger.debug( + message: "SilentModeInterceptor did nothing", + mail_subject: message.subject, + silent_mode_enabled: false + ) + end + end + end + end + end +end diff --git a/lib/gitlab/email/html_parser.rb b/lib/gitlab/email/html_parser.rb index 10dbedbb464..693048adabf 100644 --- a/lib/gitlab/email/html_parser.rb +++ b/lib/gitlab/email/html_parser.rb @@ -34,11 +34,7 @@ module Gitlab end def filtered_text - @filtered_text ||= if Feature.enabled?(:service_desk_html_to_text_email_handler) - ::Gitlab::Email::HtmlToMarkdownParser.convert(filtered_html) - else - Html2Text.convert(filtered_html) - end + @filtered_text ||= ::Gitlab::Email::HtmlToMarkdownParser.convert(filtered_html) end end end diff --git a/lib/gitlab/email/incoming_email.rb b/lib/gitlab/email/incoming_email.rb new file mode 100644 index 00000000000..a0a01ae0d70 --- /dev/null +++ b/lib/gitlab/email/incoming_email.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Gitlab + module Email + module IncomingEmail + class << self + include Gitlab::Email::Common + + def 
config + incoming_email_config + end + + def key_from_address(address, wildcard_address: nil) + wildcard_address ||= config.address + regex = address_regex(wildcard_address) + return unless regex + + match = address.match(regex) + return unless match + + match[1] + end + + private + + def address_regex(wildcard_address) + return unless wildcard_address + + regex = Regexp.escape(wildcard_address) + regex = regex.sub(Regexp.escape(WILDCARD_PLACEHOLDER), '(.+)') + Regexp.new(/\A<?#{regex}>?\z/).freeze + end + end + end + end +end diff --git a/lib/gitlab/email/receiver.rb b/lib/gitlab/email/receiver.rb index 664f0a1bb4a..51d250ea98c 100644 --- a/lib/gitlab/email/receiver.rb +++ b/lib/gitlab/email/receiver.rb @@ -110,7 +110,7 @@ module Gitlab when String # Handle emails from clients which append with commas, # example clients are Microsoft exchange and iOS app - Gitlab::IncomingEmail.scan_fallback_references(references) + email_class.scan_fallback_references(references) when nil [] end @@ -203,7 +203,7 @@ module Gitlab end def email_class - Gitlab::IncomingEmail + Gitlab::Email::IncomingEmail end end end diff --git a/lib/gitlab/email/service_desk_email.rb b/lib/gitlab/email/service_desk_email.rb new file mode 100644 index 00000000000..4ea1c077327 --- /dev/null +++ b/lib/gitlab/email/service_desk_email.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module Gitlab + module Email + module ServiceDeskEmail + class << self + include Gitlab::Email::Common + + def config + Gitlab.config.service_desk_email + end + + def key_from_address(address) + wildcard_address = config&.address + return unless wildcard_address + + Gitlab::Email::IncomingEmail.key_from_address(address, wildcard_address: wildcard_address) + end + + def address_for_key(key) + return if config.address.blank? 
+ + config.address.sub(WILDCARD_PLACEHOLDER, key) + end + end + end + end +end diff --git a/lib/gitlab/email/service_desk_receiver.rb b/lib/gitlab/email/service_desk_receiver.rb index 6c6eb3b0a65..e286cf1f68c 100644 --- a/lib/gitlab/email/service_desk_receiver.rb +++ b/lib/gitlab/email/service_desk_receiver.rb @@ -12,7 +12,7 @@ module Gitlab end def email_class - ::Gitlab::ServiceDeskEmail + ::Gitlab::Email::ServiceDeskEmail end end end diff --git a/lib/gitlab/emoji.rb b/lib/gitlab/emoji.rb index 2b36b1c99bd..7d47bfe88fe 100644 --- a/lib/gitlab/emoji.rb +++ b/lib/gitlab/emoji.rb @@ -15,20 +15,6 @@ module Gitlab Rails.root.join("public/-/emojis/#{EMOJI_VERSION}") end - def emoji_image_tag(name, src) - image_options = { - class: 'emoji', - src: src, - title: ":#{name}:", - alt: ":#{name}:", - height: 20, - width: 20, - align: 'absmiddle' - } - - ActionController::Base.helpers.tag(:img, image_options) - end - # CSS sprite fallback takes precedence over image fallback # @param [TanukiEmoji::Character] emoji # @param [Hash] options diff --git a/lib/gitlab/encrypted_incoming_email_command.rb b/lib/gitlab/encrypted_incoming_email_command.rb index a18382439d6..05fc7cac000 100644 --- a/lib/gitlab/encrypted_incoming_email_command.rb +++ b/lib/gitlab/encrypted_incoming_email_command.rb @@ -8,7 +8,7 @@ module Gitlab class << self def encrypted_secrets - Gitlab::IncomingEmail.encrypted_secrets + Gitlab::Email::IncomingEmail.encrypted_secrets end def encrypted_file_template diff --git a/lib/gitlab/encrypted_service_desk_email_command.rb b/lib/gitlab/encrypted_service_desk_email_command.rb index ece6da7c1b3..1a0317e0da9 100644 --- a/lib/gitlab/encrypted_service_desk_email_command.rb +++ b/lib/gitlab/encrypted_service_desk_email_command.rb @@ -8,7 +8,7 @@ module Gitlab class << self def encrypted_secrets - Gitlab::ServiceDeskEmail.encrypted_secrets + Gitlab::Email::ServiceDeskEmail.encrypted_secrets end def encrypted_file_template diff --git a/lib/gitlab/event_store.rb 
b/lib/gitlab/event_store.rb index 023c8ace4d9..c017396c8e8 100644 --- a/lib/gitlab/event_store.rb +++ b/lib/gitlab/event_store.rb @@ -60,6 +60,9 @@ module Gitlab store.subscribe ::MergeRequests::CreateApprovalNoteWorker, to: ::MergeRequests::ApprovedEvent store.subscribe ::MergeRequests::ResolveTodosAfterApprovalWorker, to: ::MergeRequests::ApprovedEvent store.subscribe ::MergeRequests::ExecuteApprovalHooksWorker, to: ::MergeRequests::ApprovedEvent + store.subscribe ::Ml::ExperimentTracking::AssociateMlCandidateToPackageWorker, + to: ::Packages::PackageCreatedEvent, + if: -> (event) { ::Ml::ExperimentTracking::AssociateMlCandidateToPackageWorker.handles_event?(event) } end private_class_method :configure! end diff --git a/lib/gitlab/favicon.rb b/lib/gitlab/favicon.rb index 8e48b482462..f4633473a95 100644 --- a/lib/gitlab/favicon.rb +++ b/lib/gitlab/favicon.rb @@ -24,7 +24,7 @@ module Gitlab 'favicon-blue.png' end - def status_overlay(status_name) + def ci_status_overlay(status_name) path = File.join( 'ci_favicons', "#{status_name}.png" @@ -33,6 +33,15 @@ module Gitlab ActionController::Base.helpers.image_path(path, host: host) end + def mr_status_overlay(status_name) + path = File.join( + 'mr_favicons', + "#{status_name}.png" + ) + + ActionController::Base.helpers.image_path(path, host: host) + end + def available_status_names @available_status_names ||= Dir.glob(Rails.root.join('app', 'assets', 'images', 'ci_favicons', '*.png')) .map { |file| File.basename(file, '.png') } diff --git a/lib/gitlab/git/blame_mode.rb b/lib/gitlab/git/blame_mode.rb new file mode 100644 index 00000000000..d8fc8fece06 --- /dev/null +++ b/lib/gitlab/git/blame_mode.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +module Gitlab + module Git + class BlameMode + def initialize(project, params) + @project = project + @params = params + end + + def streaming_supported? + Feature.enabled?(:blame_page_streaming, project) + end + + def streaming? + return false unless streaming_supported? 
+ + Gitlab::Utils.to_boolean(params[:streaming], default: false) + end + + def pagination? + return false if streaming? + return false if Gitlab::Utils.to_boolean(params[:no_pagination], default: false) + + Feature.enabled?(:blame_page_pagination, project) + end + + def full? + !streaming? && !pagination? + end + + private + + attr_reader :project, :params + end + end +end diff --git a/lib/gitlab/git/blame_pagination.rb b/lib/gitlab/git/blame_pagination.rb new file mode 100644 index 00000000000..6bf29859b14 --- /dev/null +++ b/lib/gitlab/git/blame_pagination.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +module Gitlab + module Git + class BlamePagination + include Gitlab::Utils::StrongMemoize + + PAGINATION_PER_PAGE = 1000 + STREAMING_FIRST_PAGE_SIZE = 200 + STREAMING_PER_PAGE = 2000 + + def initialize(blob, blame_mode, params) + @blob = blob + @blame_mode = blame_mode + @params = params + end + + def page + page = params.fetch(:page, 1).to_i + + return 1 if page < 1 + + page + end + strong_memoize_attr :page + + def per_page + blame_mode.streaming? ? STREAMING_PER_PAGE : PAGINATION_PER_PAGE + end + strong_memoize_attr :per_page + + def total_pages + total = (blob_lines_count.to_f / per_page).ceil + return total unless blame_mode.streaming? + + ([blob_lines_count - STREAMING_FIRST_PAGE_SIZE, 0].max.to_f / per_page).ceil + 1 + end + strong_memoize_attr :total_pages + + def total_extra_pages + [total_pages - 1, 0].max + end + strong_memoize_attr :total_extra_pages + + def paginator + return if blame_mode.streaming? || blame_mode.full? + + Kaminari.paginate_array([], total_count: blob_lines_count, limit: per_page) + .tap { |pagination| pagination.max_paginates_per(per_page) } + .page(page) + end + + def blame_range + return if blame_mode.full? + + first_line = ((page - 1) * per_page) + 1 + + if blame_mode.streaming? 
+ return 1..STREAMING_FIRST_PAGE_SIZE if page == 1 + + first_line = STREAMING_FIRST_PAGE_SIZE + ((page - 2) * per_page) + 1 + end + + last_line = (first_line + per_page).to_i - 1 + + first_line..last_line + end + + private + + attr_reader :blob, :blame_mode, :params + + def blob_lines_count + @blob_lines_count ||= blob.data.lines.count + end + end + end +end diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb index 40cb4521f6a..80d0fd17568 100644 --- a/lib/gitlab/git/repository.rb +++ b/lib/gitlab/git/repository.rb @@ -54,8 +54,6 @@ module Gitlab # state. alias_method :object_pool_remote_name, :gl_repository - # This initializer method is only used on the client side (gitlab-ce). - # Gitaly-ruby uses a different initializer. def initialize(storage, relative_path, gl_repository, gl_project_path, container: nil) @storage = storage @relative_path = relative_path @@ -1146,7 +1144,7 @@ module Gitlab def checksum # The exists? RPC is much cheaper, so we perform this request first - raise NoRepository, "Repository does not exists" unless exists? + raise NoRepository, "Repository does not exist" unless exists? gitaly_repository_client.calculate_checksum rescue GRPC::NotFound diff --git a/lib/gitlab/git_ref_validator.rb b/lib/gitlab/git_ref_validator.rb index f4d4cebc096..7867e1b8c37 100644 --- a/lib/gitlab/git_ref_validator.rb +++ b/lib/gitlab/git_ref_validator.rb @@ -12,10 +12,10 @@ module Gitlab # Validates a given name against the git reference specification # # Returns true for a valid reference name, false otherwise - def validate(ref_name) + def validate(ref_name, skip_head_ref_check: false) return false if ref_name.to_s.empty? # #blank? 
raises an ArgumentError for invalid encodings return false if ref_name.start_with?(*(EXPANDED_PREFIXES + DISALLOWED_PREFIXES)) - return false if ref_name == 'HEAD' + return false if ref_name == 'HEAD' && !skip_head_ref_check begin Rugged::Reference.valid_name?("refs/heads/#{ref_name}") diff --git a/lib/gitlab/github_import/bulk_importing.rb b/lib/gitlab/github_import/bulk_importing.rb index 0c91eff1d10..d16f4d7587b 100644 --- a/lib/gitlab/github_import/bulk_importing.rb +++ b/lib/gitlab/github_import/bulk_importing.rb @@ -27,8 +27,13 @@ module Gitlab build_record = model.new(attrs) if build_record.invalid? - log_error(object[:id], build_record.errors.full_messages) - errors << build_record.errors + github_identifiers = github_identifiers(object) + + log_error(github_identifiers, build_record.errors.full_messages) + errors << { + validation_errors: build_record.errors, + github_identifiers: github_identifiers + } next end @@ -53,17 +58,18 @@ module Gitlab raise NotImplementedError end - def bulk_insert_failures(validation_errors) - rows = validation_errors.map do |error| + def bulk_insert_failures(errors) + rows = errors.map do |error| correlation_id_value = Labkit::Correlation::CorrelationId.current_or_new_id { source: self.class.name, exception_class: 'ActiveRecord::RecordInvalid', - exception_message: error.full_messages.first.truncate(255), + exception_message: error[:validation_errors].full_messages.first.truncate(255), correlation_id_value: correlation_id_value, retry_count: nil, - created_at: Time.zone.now + created_at: Time.zone.now, + external_identifiers: error[:github_identifiers] } end @@ -88,15 +94,19 @@ module Gitlab ) end - def log_error(object_id, messages) + def log_error(github_identifiers, messages) Gitlab::Import::Logger.error( import_type: :github, project_id: project.id, importer: self.class.name, message: messages, - github_identifier: object_id + github_identifiers: github_identifiers ) end + + def github_identifiers(object) + raise 
NotImplementedError + end end end end diff --git a/lib/gitlab/github_import/importer/attachments/issues_importer.rb b/lib/gitlab/github_import/importer/attachments/issues_importer.rb index 090bfb4a098..c8f0b59fd18 100644 --- a/lib/gitlab/github_import/importer/attachments/issues_importer.rb +++ b/lib/gitlab/github_import/importer/attachments/issues_importer.rb @@ -24,7 +24,7 @@ module Gitlab private def collection - project.issues.select(:id, :description) + project.issues.select(:id, :description, :iid) end def ordering_column diff --git a/lib/gitlab/github_import/importer/attachments/merge_requests_importer.rb b/lib/gitlab/github_import/importer/attachments/merge_requests_importer.rb index f41071b1785..cd3a327a846 100644 --- a/lib/gitlab/github_import/importer/attachments/merge_requests_importer.rb +++ b/lib/gitlab/github_import/importer/attachments/merge_requests_importer.rb @@ -24,7 +24,7 @@ module Gitlab private def collection - project.merge_requests.select(:id, :description) + project.merge_requests.select(:id, :description, :iid) end def ordering_column diff --git a/lib/gitlab/github_import/importer/attachments/releases_importer.rb b/lib/gitlab/github_import/importer/attachments/releases_importer.rb index feaa69eff71..7d6dbeb901e 100644 --- a/lib/gitlab/github_import/importer/attachments/releases_importer.rb +++ b/lib/gitlab/github_import/importer/attachments/releases_importer.rb @@ -24,7 +24,7 @@ module Gitlab private def collection - project.releases.select(:id, :description) + project.releases.select(:id, :description, :tag) end end end diff --git a/lib/gitlab/github_import/importer/labels_importer.rb b/lib/gitlab/github_import/importer/labels_importer.rb index d5d1cd28b7c..4554b932520 100644 --- a/lib/gitlab/github_import/importer/labels_importer.rb +++ b/lib/gitlab/github_import/importer/labels_importer.rb @@ -53,9 +53,18 @@ module Gitlab :label end + private + def model Label end + + def github_identifiers(label) + { + title: label[:name], + 
object_type: object_type + } + end end end end diff --git a/lib/gitlab/github_import/importer/milestones_importer.rb b/lib/gitlab/github_import/importer/milestones_importer.rb index 560fbdc66e3..cd6d450f15b 100644 --- a/lib/gitlab/github_import/importer/milestones_importer.rb +++ b/lib/gitlab/github_import/importer/milestones_importer.rb @@ -57,9 +57,19 @@ module Gitlab :milestone end + private + def model Milestone end + + def github_identifiers(milestone) + { + iid: milestone[:number], + title: milestone[:title], + object_type: object_type + } + end end end end diff --git a/lib/gitlab/github_import/importer/note_attachments_importer.rb b/lib/gitlab/github_import/importer/note_attachments_importer.rb index a84fcd253ef..266ee2938ba 100644 --- a/lib/gitlab/github_import/importer/note_attachments_importer.rb +++ b/lib/gitlab/github_import/importer/note_attachments_importer.rb @@ -6,7 +6,7 @@ module Gitlab class NoteAttachmentsImporter attr_reader :note_text, :project - # note_text - An instance of `NoteText`. + # note_text - An instance of `Gitlab::GithubImport::Representation::NoteText`. # project - An instance of `Project`. # client - An instance of `Gitlab::GithubImport::Client`. 
def initialize(note_text, project, _client = nil) diff --git a/lib/gitlab/github_import/importer/pull_request_merged_by_importer.rb b/lib/gitlab/github_import/importer/pull_request_merged_by_importer.rb index f05aa26a449..51a72a80268 100644 --- a/lib/gitlab/github_import/importer/pull_request_merged_by_importer.rb +++ b/lib/gitlab/github_import/importer/pull_request_merged_by_importer.rb @@ -17,11 +17,7 @@ module Gitlab def execute user_finder = GithubImport::UserFinder.new(project, client) - gitlab_user_id = begin - user_finder.user_id_for(pull_request.merged_by) - rescue ::Octokit::NotFound - nil - end + gitlab_user_id = user_finder.user_id_for(pull_request.merged_by) metrics_upsert(gitlab_user_id) diff --git a/lib/gitlab/github_import/importer/pull_request_review_importer.rb b/lib/gitlab/github_import/importer/pull_request_review_importer.rb index b1e259fe940..a711f83ce92 100644 --- a/lib/gitlab/github_import/importer/pull_request_review_importer.rb +++ b/lib/gitlab/github_import/importer/pull_request_review_importer.rb @@ -17,11 +17,7 @@ module Gitlab def execute user_finder = GithubImport::UserFinder.new(project, client) - gitlab_user_id = begin - user_finder.user_id_for(review.author) - rescue ::Octokit::NotFound - nil - end + gitlab_user_id = user_finder.user_id_for(review.author) if gitlab_user_id add_review_note!(gitlab_user_id) diff --git a/lib/gitlab/github_import/importer/pull_requests/review_requests_importer.rb b/lib/gitlab/github_import/importer/pull_requests/review_requests_importer.rb index c5d8da3be1c..0a92aee801d 100644 --- a/lib/gitlab/github_import/importer/pull_requests/review_requests_importer.rb +++ b/lib/gitlab/github_import/importer/pull_requests/review_requests_importer.rb @@ -18,6 +18,7 @@ module Gitlab review_requests = client.pull_request_review_requests(repo, merge_request.iid) review_requests[:merge_request_id] = merge_request.id + review_requests[:merge_request_iid] = merge_request.iid yield review_requests 
mark_merge_request_imported(merge_request) diff --git a/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb b/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb index 543c29a21a0..854e5a50fb1 100644 --- a/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb +++ b/lib/gitlab/github_import/importer/pull_requests_reviews_importer.rb @@ -55,6 +55,7 @@ module Gitlab Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched) review[:merge_request_id] = merge_request.id + review[:merge_request_iid] = merge_request.iid yield(review) mark_as_imported(review) diff --git a/lib/gitlab/github_import/importer/releases_importer.rb b/lib/gitlab/github_import/importer/releases_importer.rb index 62d579fda08..2f210dafd0c 100644 --- a/lib/gitlab/github_import/importer/releases_importer.rb +++ b/lib/gitlab/github_import/importer/releases_importer.rb @@ -73,6 +73,13 @@ module Gitlab def model Release end + + def github_identifiers(release) + { + tag: release[:tag_name], + object_type: object_type + } + end end end end diff --git a/lib/gitlab/github_import/representation/collaborator.rb b/lib/gitlab/github_import/representation/collaborator.rb index 55f13593f4f..fb58a572151 100644 --- a/lib/gitlab/github_import/representation/collaborator.rb +++ b/lib/gitlab/github_import/representation/collaborator.rb @@ -34,7 +34,10 @@ module Gitlab end def github_identifiers - { id: id } + { + id: id, + login: login + } end end end diff --git a/lib/gitlab/github_import/representation/issue.rb b/lib/gitlab/github_import/representation/issue.rb index e878aeaf3b9..95a7c5ebf4b 100644 --- a/lib/gitlab/github_import/representation/issue.rb +++ b/lib/gitlab/github_import/representation/issue.rb @@ -79,7 +79,8 @@ module Gitlab def github_identifiers { iid: iid, - issuable_type: issuable_type + issuable_type: issuable_type, + title: title } end end diff --git a/lib/gitlab/github_import/representation/issue_event.rb 
b/lib/gitlab/github_import/representation/issue_event.rb index 39a23c016ce..35eb4006f37 100644 --- a/lib/gitlab/github_import/representation/issue_event.rb +++ b/lib/gitlab/github_import/representation/issue_event.rb @@ -20,7 +20,11 @@ module Gitlab end def github_identifiers - { id: id } + { + id: id, + iid: issuable_id, + event: event + } end def issuable_type diff --git a/lib/gitlab/github_import/representation/lfs_object.rb b/lib/gitlab/github_import/representation/lfs_object.rb index cd614db2161..716e77bf401 100644 --- a/lib/gitlab/github_import/representation/lfs_object.rb +++ b/lib/gitlab/github_import/representation/lfs_object.rb @@ -33,7 +33,8 @@ module Gitlab def github_identifiers { - oid: oid + oid: oid, + size: size } end end diff --git a/lib/gitlab/github_import/representation/note_text.rb b/lib/gitlab/github_import/representation/note_text.rb index 505d7d805d3..70dd242303a 100644 --- a/lib/gitlab/github_import/representation/note_text.rb +++ b/lib/gitlab/github_import/representation/note_text.rb @@ -16,35 +16,35 @@ module Gitlab attr_reader :attributes - expose_attribute :record_db_id, :record_type, :text - - class << self - # Builds a note text representation from DB record of Note or Release. - # - # record - An instance of `Note`, `Release`, `Issue`, `MergeRequest` model - def from_db_record(record) - check_record_class!(record) - - record_type = record.class.name - # only column for note is different along MODELS_ALLOWLIST - text = record.is_a?(::Note) ? record.note : record.description - new( - record_db_id: record.id, - record_type: record_type, - text: text - ) - end + expose_attribute :record_db_id, :record_type, :text, :iid, :tag, :noteable_type - def from_json_hash(raw_hash) - new Representation.symbolize_hash(raw_hash) - end + # Builds a note text representation from DB record of Note or Release. 
+ # + # record - An instance of `Note`, `Release`, `Issue`, `MergeRequest` model + def self.from_db_record(record) + check_record_class!(record) - private + record_type = record.class.name + # only column for note is different along MODELS_ALLOWLIST + text = record.is_a?(::Note) ? record.note : record.description + new( + record_db_id: record.id, + record_type: record_type, + text: text, + iid: record.try(:iid), + tag: record.try(:tag), + noteable_type: record.try(:noteable_type) + ) + end - def check_record_class!(record) - raise ModelNotSupported, record.class.name if MODELS_ALLOWLIST.exclude?(record.class) - end + def self.from_json_hash(raw_hash) + new Representation.symbolize_hash(raw_hash) + end + + def self.check_record_class!(record) + raise ModelNotSupported, record.class.name if MODELS_ALLOWLIST.exclude?(record.class) end + private_class_method :check_record_class! # attributes - A Hash containing the event details. The keys of this # Hash (and any nested hashes) must be symbols. 
@@ -53,7 +53,22 @@ module Gitlab end def github_identifiers - { db_id: record_db_id } + { + db_id: record_db_id + }.merge(record_type_specific_attribute) + end + + private + + def record_type_specific_attribute + case record_type + when ::Release.name + { tag: tag } + when ::Issue.name, ::MergeRequest.name + { noteable_iid: iid } + when ::Note.name + { noteable_type: noteable_type } + end end end end diff --git a/lib/gitlab/github_import/representation/pull_request.rb b/lib/gitlab/github_import/representation/pull_request.rb index 4b8ae1f8eab..f26fa953773 100644 --- a/lib/gitlab/github_import/representation/pull_request.rb +++ b/lib/gitlab/github_import/representation/pull_request.rb @@ -111,7 +111,8 @@ module Gitlab def github_identifiers { iid: iid, - issuable_type: issuable_type + issuable_type: issuable_type, + title: title } end end diff --git a/lib/gitlab/github_import/representation/pull_request_review.rb b/lib/gitlab/github_import/representation/pull_request_review.rb index 8fb57ae89a4..0c6e281cd6d 100644 --- a/lib/gitlab/github_import/representation/pull_request_review.rb +++ b/lib/gitlab/github_import/representation/pull_request_review.rb @@ -9,7 +9,7 @@ module Gitlab attr_reader :attributes - expose_attribute :author, :note, :review_type, :submitted_at, :merge_request_id, :review_id + expose_attribute :author, :note, :review_type, :submitted_at, :merge_request_id, :merge_request_iid, :review_id # Builds a PullRequestReview from a GitHub API response. 
# @@ -19,6 +19,7 @@ module Gitlab new( merge_request_id: review[:merge_request_id], + merge_request_iid: review[:merge_request_iid], author: user, note: review[:body], review_type: review[:state], @@ -49,8 +50,8 @@ module Gitlab def github_identifiers { - review_id: review_id, - merge_request_id: merge_request_id + merge_request_iid: merge_request_iid, + review_id: review_id } end end diff --git a/lib/gitlab/github_import/representation/pull_requests/review_requests.rb b/lib/gitlab/github_import/representation/pull_requests/review_requests.rb index 692004c4460..a6ec1d3178b 100644 --- a/lib/gitlab/github_import/representation/pull_requests/review_requests.rb +++ b/lib/gitlab/github_import/representation/pull_requests/review_requests.rb @@ -10,7 +10,7 @@ module Gitlab attr_reader :attributes - expose_attribute :merge_request_id, :users + expose_attribute :merge_request_id, :merge_request_iid, :users class << self # Builds a list of requested reviewers from a GitHub API response. @@ -24,6 +24,7 @@ module Gitlab new( merge_request_id: review_requests[:merge_request_id], + merge_request_iid: review_requests[:merge_request_iid], users: users ) end @@ -37,7 +38,10 @@ module Gitlab end def github_identifiers - { merge_request_id: merge_request_id } + { + merge_request_iid: merge_request_iid, + requested_reviewers: users.pluck(:login) # rubocop: disable CodeReuse/ActiveRecord + } end end end diff --git a/lib/gitlab/github_import/user_finder.rb b/lib/gitlab/github_import/user_finder.rb index b8751def08f..dd71edbd205 100644 --- a/lib/gitlab/github_import/user_finder.rb +++ b/lib/gitlab/github_import/user_finder.rb @@ -28,6 +28,9 @@ module Gitlab EMAIL_FOR_USERNAME_CACHE_KEY = 'github-import/user-finder/email-for-username/%s' + # The base cache key to use for caching inexistence of GitHub usernames. 
+ INEXISTENCE_OF_GITHUB_USERNAME_CACHE_KEY = 'github-import/user-finder/inexistence-of-username/%s' + # project - An instance of `Project` # client - An instance of `Gitlab::GithubImport::Client` def initialize(project, client) @@ -113,12 +116,15 @@ module Gitlab cache_key = EMAIL_FOR_USERNAME_CACHE_KEY % username email = Gitlab::Cache::Import::Caching.read(cache_key) - unless email + if email.blank? && !github_username_inexists?(username) user = client.user(username) email = Gitlab::Cache::Import::Caching.write(cache_key, user[:email], timeout: timeout(user[:email])) if user end email + rescue ::Octokit::NotFound + cache_github_username_inexistence(username) + nil end def cached_id_for_github_id(id) @@ -190,6 +196,18 @@ module Gitlab Gitlab::Cache::Import::Caching::SHORTER_TIMEOUT end end + + def github_username_inexists?(username) + cache_key = INEXISTENCE_OF_GITHUB_USERNAME_CACHE_KEY % username + + Gitlab::Cache::Import::Caching.read(cache_key) == 'true' + end + + def cache_github_username_inexistence(username) + cache_key = INEXISTENCE_OF_GITHUB_USERNAME_CACHE_KEY % username + + Gitlab::Cache::Import::Caching.write(cache_key, true) + end end end end diff --git a/lib/gitlab/gl_repository.rb b/lib/gitlab/gl_repository.rb index d123989ef8e..efdb205b8eb 100644 --- a/lib/gitlab/gl_repository.rb +++ b/lib/gitlab/gl_repository.rb @@ -34,7 +34,7 @@ module Gitlab DESIGN = ::Gitlab::GlRepository::RepoType.new( name: :design, access_checker_class: ::Gitlab::GitAccessDesign, - repository_resolver: -> (project) { ::DesignManagement::Repository.new(project) }, + repository_resolver: -> (project) { ::DesignManagement::Repository.new(project: project) }, suffix: :design ).freeze diff --git a/lib/gitlab/gon_helper.rb b/lib/gitlab/gon_helper.rb index d7d06aa5271..eb071b44374 100644 --- a/lib/gitlab/gon_helper.rb +++ b/lib/gitlab/gon_helper.rb @@ -49,6 +49,7 @@ module Gitlab gon.ee = Gitlab.ee? gon.jh = Gitlab.jh? gon.dot_com = Gitlab.com? 
+ gon.uf_error_prefix = ::Gitlab::Utils::ErrorMessage::UF_ERROR_PREFIX if current_user gon.current_user_id = current_user.id @@ -63,10 +64,9 @@ module Gitlab # made globally available to the frontend push_frontend_feature_flag(:usage_data_api, type: :ops) push_frontend_feature_flag(:security_auto_fix) - push_frontend_feature_flag(:new_header_search) push_frontend_feature_flag(:source_editor_toolbar) push_frontend_feature_flag(:vscode_web_ide, current_user) - push_frontend_feature_flag(:full_path_project_search, current_user) + push_frontend_feature_flag(:super_sidebar_peek) end # Exposes the state of a feature flag to the frontend code. diff --git a/lib/gitlab/graphql/authorize/authorize_resource.rb b/lib/gitlab/graphql/authorize/authorize_resource.rb index 983bdb9c0a2..e3548b97ebf 100644 --- a/lib/gitlab/graphql/authorize/authorize_resource.rb +++ b/lib/gitlab/graphql/authorize/authorize_resource.rb @@ -45,8 +45,8 @@ module Gitlab end end - def find_object(*args) - raise NotImplementedError, "Implement #find_object in #{self.class.name}" + def find_object(id:) + GitlabSchema.find_by_gid(id) end def authorized_find!(*args, **kwargs) diff --git a/lib/gitlab/graphql/deprecations/deprecation.rb b/lib/gitlab/graphql/deprecations/deprecation.rb index 7f4cea7c635..dfcca5ee75b 100644 --- a/lib/gitlab/graphql/deprecations/deprecation.rb +++ b/lib/gitlab/graphql/deprecations/deprecation.rb @@ -9,7 +9,7 @@ module Gitlab REASONS = { REASON_RENAMED => 'This was renamed.', - REASON_ALPHA => 'This feature is in Alpha. It can be changed or removed at any time.' + REASON_ALPHA => 'This feature is an Experiment. It can be changed or removed at any time.' 
}.freeze include ActiveModel::Validations @@ -27,7 +27,7 @@ module Gitlab return unless options if alpha - raise ArgumentError, '`alpha` and `deprecated` arguments cannot be passed at the same time' \ + raise ArgumentError, '`experiment` and `deprecated` arguments cannot be passed at the same time' \ if deprecated options[:reason] = :alpha diff --git a/lib/gitlab/graphql/loaders/lazy_relation_loader.rb b/lib/gitlab/graphql/loaders/lazy_relation_loader.rb new file mode 100644 index 00000000000..69056e87091 --- /dev/null +++ b/lib/gitlab/graphql/loaders/lazy_relation_loader.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +module Gitlab + module Graphql + module Loaders + class LazyRelationLoader + class << self + attr_accessor :model, :association + + # Automatically register the inheriting + # classes to GitlabSchema as lazy objects. + def inherited(klass) + GitlabSchema.lazy_resolve(klass, :load) + end + end + + def initialize(query_ctx, object, **kwargs) + @query_ctx = query_ctx + @object = object + @kwargs = kwargs + + query_ctx[loader_cache_key] ||= Registry.new(relation(**kwargs)) + query_ctx[loader_cache_key].register(object) + end + + # Returns an instance of `RelationProxy` for the object (parent model). + # The returned object behaves like an Active Record relation to support + # keyset pagination. + def load + case reflection.macro + when :has_many + relation_proxy + when :has_one + relation_proxy.last + else + raise 'Not supported association type!' 
+ end + end + + private + + attr_reader :query_ctx, :object, :kwargs + + delegate :model, :association, to: :"self.class" + + # Implement this one if you want to filter the relation + def relation(**) + base_relation + end + + def loader_cache_key + @loader_cache_key ||= self.class.name.to_s + kwargs.sort.to_s + end + + def base_relation + placeholder_record.association(association).scope + end + + # This will only work for HasMany and HasOne associations for now + def placeholder_record + model.new(reflection.active_record_primary_key => 0) + end + + def reflection + model.reflections[association.to_s] + end + + def relation_proxy + RelationProxy.new(object, query_ctx[loader_cache_key]) + end + end + end + end +end diff --git a/lib/gitlab/graphql/loaders/lazy_relation_loader/registry.rb b/lib/gitlab/graphql/loaders/lazy_relation_loader/registry.rb new file mode 100644 index 00000000000..ab2b2bd4dc2 --- /dev/null +++ b/lib/gitlab/graphql/loaders/lazy_relation_loader/registry.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +module Gitlab + module Graphql + module Loaders + class LazyRelationLoader + class Registry + PrematureQueryExecutionTriggered = Class.new(RuntimeError) + # Following methods are Active Record kicker methods which fire SQL query. + # We can support some of them with TopNLoader but for now restricting their use + # as we don't have a use case. + PROHIBITED_METHODS = ( + ActiveRecord::FinderMethods.instance_methods(false) + + ActiveRecord::Calculations.instance_methods(false) + ).to_set.freeze + + def initialize(relation) + @parents = [] + @relation = relation + @records = [] + @loaded = false + end + + def register(object) + @parents << object + end + + def method_missing(method_name, ...) + raise PrematureQueryExecutionTriggered if PROHIBITED_METHODS.include?(method_name) + + result = relation.public_send(method_name, ...) 
# rubocop:disable GitlabSecurity/PublicSend + + if result.is_a?(ActiveRecord::Relation) # Spawn methods generate a new relation (e.g. where, limit) + @relation = result + + return self + end + + result + end + + def respond_to_missing?(method_name, include_private = false) + relation.respond_to?(method_name, include_private) + end + + def load + return records if loaded + + @loaded = true + @records = TopNLoader.load(relation, parents) + end + + def for(object) + load.select { |record| record[foreign_key] == object[active_record_primary_key] } + .tap { |records| set_inverse_of(object, records) } + end + + private + + attr_reader :parents, :relation, :records, :loaded + + delegate :proxy_association, to: :relation, private: true + delegate :reflection, to: :proxy_association, private: true + delegate :active_record_primary_key, :foreign_key, to: :reflection, private: true + + def set_inverse_of(object, records) + records.each do |record| + object.association(reflection.name).set_inverse_instance(record) + end + end + end + end + end + end +end diff --git a/lib/gitlab/graphql/loaders/lazy_relation_loader/relation_proxy.rb b/lib/gitlab/graphql/loaders/lazy_relation_loader/relation_proxy.rb new file mode 100644 index 00000000000..bab2a272fb0 --- /dev/null +++ b/lib/gitlab/graphql/loaders/lazy_relation_loader/relation_proxy.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module Gitlab + module Graphql + module Loaders + class LazyRelationLoader + # Proxies all the method calls to Registry instance. + # The main purpose of having this is that calling load + # on an instance of this class will only return the records + # associated with the main Active Record model. + class RelationProxy + def initialize(object, registry) + @object = object + @registry = registry + end + + def load + registry.for(object) + end + alias_method :to_a, :load + + def last(limit = 1) + result = registry.limit(limit) + .reverse_order! 
+ .for(object) + + return result.first if limit == 1 # This is the Active Record behavior + + result + end + + private + + attr_reader :registry, :object + + # Delegate everything to registry + def method_missing(method_name, ...) + result = registry.public_send(method_name, ...) # rubocop:disable GitlabSecurity/PublicSend + + return self if result == registry + + result + end + + def respond_to_missing?(method_name, include_private = false) + registry.respond_to?(method_name, include_private) + end + end + end + end + end +end diff --git a/lib/gitlab/graphql/loaders/lazy_relation_loader/top_n_loader.rb b/lib/gitlab/graphql/loaders/lazy_relation_loader/top_n_loader.rb new file mode 100644 index 00000000000..6404148832b --- /dev/null +++ b/lib/gitlab/graphql/loaders/lazy_relation_loader/top_n_loader.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +# rubocop:disable CodeReuse/ActiveRecord +module Gitlab + module Graphql + module Loaders + class LazyRelationLoader + # Loads the top-n records for each given parent record. + # For example; if you want to load only 5 confidential issues ordered by + # their updated_at column per project for a list of projects by issuing only a single + # SQL query then this class can help you. + # Note that the limit applies per parent record which means that if you apply limit as 5 + # for 10 projects, this loader will load 50 records in total. 
+ class TopNLoader + def self.load(original_relation, parents) + new(original_relation, parents).load + end + + def initialize(original_relation, parents) + @original_relation = original_relation + @parents = parents + end + + def load + klass.select(klass.arel_table[Arel.star]) + .from(from) + .joins("JOIN LATERAL (#{lateral_relation.to_sql}) AS #{klass.arel_table.name} ON true") + .includes(original_includes) + .preload(original_preload) + .eager_load(original_eager_load) + .load + end + + private + + attr_reader :original_relation, :parents + + delegate :proxy_association, to: :original_relation, private: true + delegate :reflection, to: :proxy_association, private: true + delegate :klass, :foreign_key, :active_record, :active_record_primary_key, + to: :reflection, private: true + + # This only works for HasMany and HasOne. + def lateral_relation + original_relation + .unscope(where: foreign_key) # unscoping the where condition generated for the placeholder_record. + .where(klass.arel_table[foreign_key].eq(active_record.arel_table[active_record_primary_key])) + end + + def from + grouping_arel_node.as("#{active_record.arel_table.name}(#{active_record.primary_key})") + end + + def grouping_arel_node + Arel::Nodes::Grouping.new(id_list_arel_node) + end + + def id_list_arel_node + parent_ids.map { |id| [id] } + .then { |ids| Arel::Nodes::ValuesList.new(ids) } + end + + def parent_ids + parents.pluck(active_record.primary_key) + end + + def original_includes + original_relation.includes_values + end + + def original_preload + original_relation.preload_values + end + + def original_eager_load + original_relation.eager_load_values + end + end + end + end + end +end +# rubocop:enable CodeReuse/ActiveRecord diff --git a/lib/gitlab/graphql/pagination/connections.rb b/lib/gitlab/graphql/pagination/connections.rb index 965c01dd02f..df1231b005f 100644 --- a/lib/gitlab/graphql/pagination/connections.rb +++ b/lib/gitlab/graphql/pagination/connections.rb @@ -14,6 +14,10 @@ 
module Gitlab Gitlab::Graphql::Pagination::Keyset::Connection) schema.connections.add( + Gitlab::Graphql::Loaders::LazyRelationLoader::RelationProxy, + Gitlab::Graphql::Pagination::Keyset::Connection) + + schema.connections.add( Gitlab::Graphql::ExternallyPaginatedArray, Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection) diff --git a/lib/gitlab/graphql/project/dast_profile_connection_extension.rb b/lib/gitlab/graphql/project/dast_profile_connection_extension.rb index 45f90de2f17..1c21d286187 100644 --- a/lib/gitlab/graphql/project/dast_profile_connection_extension.rb +++ b/lib/gitlab/graphql/project/dast_profile_connection_extension.rb @@ -12,9 +12,12 @@ module Gitlab def preload_authorizations(dast_profiles) return unless dast_profiles - projects = dast_profiles.map(&:project) - users = dast_profiles.filter_map { |dast_profile| dast_profile.dast_profile_schedule&.owner } - Preloaders::UsersMaxAccessLevelInProjectsPreloader.new(projects: projects, users: users).execute + project_users = dast_profiles.group_by(&:project).transform_values do |project_profiles| + project_profiles + .filter_map { |profile| profile.dast_profile_schedule&.owner } + .uniq + end + Preloaders::UsersMaxAccessLevelByProjectPreloader.new(project_users: project_users).execute end end end diff --git a/lib/gitlab/graphql/subscriptions/action_cable_with_load_balancing.rb b/lib/gitlab/graphql/subscriptions/action_cable_with_load_balancing.rb new file mode 100644 index 00000000000..a74d8982d73 --- /dev/null +++ b/lib/gitlab/graphql/subscriptions/action_cable_with_load_balancing.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module Gitlab + module Graphql + module Subscriptions + class ActionCableWithLoadBalancing < ::GraphQL::Subscriptions::ActionCableSubscriptions + extend ::Gitlab::Utils::Override + + # When executing updates we are usually responding to a broadcast as a result of a DB update. 
+ # We use the primary so that we are sure that we are returning the newly updated data. + override :execute_update + def execute_update(subscription_id, event, object) + ::Gitlab::Database::LoadBalancing::Session.current.use_primary! + + super + end + end + end + end +end diff --git a/lib/gitlab/harbor/client.rb b/lib/gitlab/harbor/client.rb index ee40725ba95..380e4e42bc7 100644 --- a/lib/gitlab/harbor/client.rb +++ b/lib/gitlab/harbor/client.rb @@ -14,9 +14,9 @@ module Gitlab @integration = integration end - def ping - options = { headers: headers.merge!('Accept': 'text/plain') } - response = Gitlab::HTTP.get(url('ping'), options) + def check_project_availability + options = { headers: headers.merge!('Accept': 'application/json') } + response = Gitlab::HTTP.head(url("projects?project_name=#{integration.project_name}"), options) { success: response.success? } end diff --git a/lib/gitlab/hook_data/base_builder.rb b/lib/gitlab/hook_data/base_builder.rb index e5bae61ae4e..4a81f6b8a0e 100644 --- a/lib/gitlab/hook_data/base_builder.rb +++ b/lib/gitlab/hook_data/base_builder.rb @@ -5,15 +5,14 @@ module Gitlab class BaseBuilder attr_accessor :object - MARKDOWN_SIMPLE_IMAGE = %r{ - #{::Gitlab::Regex.markdown_code_or_html_blocks} - | - (?<image> - ! - \[(?<title>[^\n]*?)\] - \((?<url>(?!(https?://|//))[^\n]+?)\) - ) - }mx.freeze + MARKDOWN_SIMPLE_IMAGE = + "#{::Gitlab::Regex.markdown_code_or_html_blocks_untrusted}" \ + '|' \ + '(?P<image>' \ + '!' \ + '\[(?P<title>[^\n]*?)\]' \ + '\((?P<url>(?P<https>(https?://|//)?)[^\n]+?)\)' \ + ')'.freeze def initialize(object) @object = object @@ -37,15 +36,18 @@ module Gitlab def absolute_image_urls(markdown_text) return markdown_text unless markdown_text.present? 
- markdown_text.gsub(MARKDOWN_SIMPLE_IMAGE) do - if $~[:image] - url = $~[:url] + regex = Gitlab::UntrustedRegexp.new(MARKDOWN_SIMPLE_IMAGE, multiline: false) + return markdown_text unless regex.match?(markdown_text) + + regex.replace_gsub(markdown_text) do |match| + if match[:image] && !match[:https] + url = match[:url] url = "#{uploads_prefix}#{url}" if url.start_with?('/uploads') url = "/#{url}" unless url.start_with?('/') - "![#{$~[:title]}](#{Gitlab.config.gitlab.url}#{url})" + "![#{match[:title]}](#{Gitlab.config.gitlab.url}#{url})" else - $~[0] + match.to_s end end end diff --git a/lib/gitlab/http_connection_adapter.rb b/lib/gitlab/http_connection_adapter.rb index c6f9f2df299..2152f619228 100644 --- a/lib/gitlab/http_connection_adapter.rb +++ b/lib/gitlab/http_connection_adapter.rb @@ -59,8 +59,6 @@ module Gitlab end def dns_rebind_protection? - return false if Gitlab.http_proxy_env? - Gitlab::CurrentSettings.dns_rebinding_protection_enabled? end diff --git a/lib/gitlab/i18n.rb b/lib/gitlab/i18n.rb index a1b6e937396..af7c53abf09 100644 --- a/lib/gitlab/i18n.rb +++ b/lib/gitlab/i18n.rb @@ -44,7 +44,7 @@ module Gitlab TRANSLATION_LEVELS = { 'bg' => 0, 'cs_CZ' => 0, - 'da_DK' => 33, + 'da_DK' => 32, 'de' => 15, 'en' => 100, 'eo' => 0, @@ -54,18 +54,18 @@ module Gitlab 'gl_ES' => 0, 'id_ID' => 0, 'it' => 1, - 'ja' => 31, + 'ja' => 32, 'ko' => 19, 'nb_NO' => 22, 'nl_NL' => 0, 'pl_PL' => 3, 'pt_BR' => 56, - 'ro_RO' => 89, - 'ru' => 25, + 'ro_RO' => 87, + 'ru' => 24, 'si_LK' => 10, 'tr_TR' => 10, 'uk' => 53, - 'zh_CN' => 96, + 'zh_CN' => 99, 'zh_HK' => 1, 'zh_TW' => 98 }.freeze diff --git a/lib/gitlab/import/metrics.rb b/lib/gitlab/import/metrics.rb index e457d9ec57c..8263df3dc37 100644 --- a/lib/gitlab/import/metrics.rb +++ b/lib/gitlab/import/metrics.rb @@ -32,14 +32,14 @@ module Gitlab return unless project.github_import? 
track_usage_event(:github_import_project_failure, project.id) - track_import_state('github') + track_import_state('github', 'Import::GithubService') end def track_canceled_import return unless project.github_import? track_usage_event(:github_import_project_cancelled, project.id) - track_import_state('github') + track_import_state('github', 'Import::GithubService') end def issues_counter @@ -83,7 +83,7 @@ module Gitlab def track_finish_metric return unless project.github_import? - track_import_state('github') + track_import_state('github', 'Import::GithubService') case project.beautified_import_status_name when 'partially completed' @@ -93,13 +93,14 @@ module Gitlab end end - def track_import_state(type) + def track_import_state(type, category) Gitlab::Tracking.event( - importer, + category, 'create', label: "#{type}_import_project_state", project: project, - extra: { import_type: type, state: project.beautified_import_status_name } + import_type: type, + state: project.beautified_import_status_name ) end end diff --git a/lib/gitlab/import_export/base/relation_factory.rb b/lib/gitlab/import_export/base/relation_factory.rb index e3813070aa4..3d96e891797 100644 --- a/lib/gitlab/import_export/base/relation_factory.rb +++ b/lib/gitlab/import_export/base/relation_factory.rb @@ -295,6 +295,13 @@ module Gitlab end def unique_relation? + # this guard is necessary because + # when multiple approval_project_rules_protected_branch referenced the same protected branch + # or approval_project_rules_user referenced the same user + # the different instances were squashed into one + # because this method returned true for reason that needs investigation + return if @relation_sym == :approval_rules + strong_memoize(:unique_relation) do importable_foreign_key.present? && (has_unique_index_on_importable_fk? || uses_importable_fk_as_primary_key?) 
diff --git a/lib/gitlab/import_export/config.rb b/lib/gitlab/import_export/config.rb index 423e0933605..e1a62e3b25a 100644 --- a/lib/gitlab/import_export/config.rb +++ b/lib/gitlab/import_export/config.rb @@ -52,7 +52,7 @@ module Gitlab end def parse_yaml - YAML.load_file(@config) + YAML.safe_load_file(@config, aliases: true, permitted_classes: [Symbol]) end end end diff --git a/lib/gitlab/import_export/group/relation_tree_restorer.rb b/lib/gitlab/import_export/group/relation_tree_restorer.rb index 624acd3bb2a..5825db89201 100644 --- a/lib/gitlab/import_export/group/relation_tree_restorer.rb +++ b/lib/gitlab/import_export/group/relation_tree_restorer.rb @@ -34,7 +34,6 @@ module Gitlab update_params! BulkInsertableAssociations.with_bulk_insert(enabled: bulk_insert_enabled) do - fix_ci_pipelines_not_sorted_on_legacy_project_json! create_relations! end end @@ -275,15 +274,6 @@ module Gitlab } end - # Temporary fix for https://gitlab.com/gitlab-org/gitlab/-/issues/27883 when import from legacy project.json - # This should be removed once legacy JSON format is deprecated. - # Ndjson export file will fix the order during project export. - def fix_ci_pipelines_not_sorted_on_legacy_project_json! - return unless @relation_reader.legacy? 
- - @relation_reader.sort_ci_pipelines_by_id - end - # Enable logging of each top-level relation creation when Importing into a Group def log_relation_creation(importable, relation_key, relation_object) root_ancestor_group = importable.try(:root_ancestor) diff --git a/lib/gitlab/import_export/json/legacy_reader.rb b/lib/gitlab/import_export/json/legacy_reader.rb deleted file mode 100644 index ee360020556..00000000000 --- a/lib/gitlab/import_export/json/legacy_reader.rb +++ /dev/null @@ -1,123 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module ImportExport - module Json - class LegacyReader - class File < LegacyReader - include Gitlab::Utils::StrongMemoize - - def initialize(path, relation_names:, allowed_path: nil) - @path = path - super( - relation_names: relation_names, - allowed_path: allowed_path) - end - - def exist? - ::File.exist?(@path) - end - - protected - - def tree_hash - strong_memoize(:tree_hash) do - read_hash - end - end - - def read_hash - Gitlab::Json.parse(::File.read(@path)) - rescue StandardError => e - Gitlab::ErrorTracking.log_exception(e) - raise Gitlab::ImportExport::Error, 'Incorrect JSON format' - end - end - - class Hash < LegacyReader - def initialize(tree_hash, relation_names:, allowed_path: nil) - @tree_hash = tree_hash - super( - relation_names: relation_names, - allowed_path: allowed_path) - end - - def exist? - @tree_hash.present? - end - - protected - - attr_reader :tree_hash - end - - def initialize(relation_names:, allowed_path:) - @relation_names = relation_names.map(&:to_s) - @consumed_relations = Set.new - - # This is legacy reader, to be used in transition - # period before `.ndjson`, - # we strong validate what is being readed - @allowed_path = allowed_path - end - - def exist? - raise NotImplementedError - end - - def legacy? - true - end - - def consume_attributes(importable_path) - unless importable_path == @allowed_path - raise ArgumentError, "Invalid #{importable_path} passed to `consume_attributes`. 
Use #{@allowed_path} instead." - end - - attributes - end - - def consume_relation(importable_path, key) - unless importable_path == @allowed_path - raise ArgumentError, "Invalid #{importable_name} passed to `consume_relation`. Use #{@allowed_path} instead." - end - - Enumerator.new do |documents| - next unless @consumed_relations.add?("#{importable_path}/#{key}") - - value = relations.delete(key) - next if value.nil? - - if value.is_a?(Array) - value.each.with_index do |item, idx| - documents << [item, idx] - end - else - documents << [value, 0] - end - end - end - - def sort_ci_pipelines_by_id - relations['ci_pipelines']&.sort_by! { |hash| hash['id'] } - end - - private - - attr_reader :relation_names, :allowed_path - - def tree_hash - raise NotImplementedError - end - - def attributes - @attributes ||= tree_hash.slice!(*relation_names) - end - - def relations - @relations ||= tree_hash.extract!(*relation_names) - end - end - end - end -end diff --git a/lib/gitlab/import_export/json/legacy_writer.rb b/lib/gitlab/import_export/json/legacy_writer.rb deleted file mode 100644 index e03ab9f7650..00000000000 --- a/lib/gitlab/import_export/json/legacy_writer.rb +++ /dev/null @@ -1,88 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module ImportExport - module Json - class LegacyWriter - include Gitlab::ImportExport::CommandLineUtil - - attr_reader :path - - def initialize(path, allowed_path:) - @path = path - @keys = Set.new - - # This is legacy writer, to be used in transition - # period before `.ndjson`, - # we strong validate what is being written - @allowed_path = allowed_path - - mkdir_p(File.dirname(@path)) - file.write('{}') - end - - def close - @file&.close - @file = nil - end - - def write_attributes(exportable_path, hash) - unless exportable_path == @allowed_path - raise ArgumentError, "Invalid #{exportable_path}" - end - - hash.each do |key, value| - write(key, value) - end - end - - def write_relation(exportable_path, key, value) - unless 
exportable_path == @allowed_path - raise ArgumentError, "Invalid #{exportable_path}" - end - - write(key, value) - end - - def write_relation_array(exportable_path, key, items) - unless exportable_path == @allowed_path - raise ArgumentError, "Invalid #{exportable_path}" - end - - write(key, []) - - # rewind by two bytes, to overwrite ']}' - file.pos = file.size - 2 - - items.each_with_index do |item, idx| - file.write(',') if idx > 0 - file.write(item.to_json) - end - - file.write(']}') - end - - private - - def write(key, value) - raise ArgumentError, "key '#{key}' already written" if @keys.include?(key) - - # rewind by one byte, to overwrite '}' - file.pos = file.size - 1 - - file.write(',') if @keys.any? - file.write(key.to_json) - file.write(':') - file.write(value.to_json) - file.write('}') - - @keys.add(key) - end - - def file - @file ||= File.open(@path, "wb") - end - end - end - end -end diff --git a/lib/gitlab/import_export/json/ndjson_reader.rb b/lib/gitlab/import_export/json/ndjson_reader.rb index 510da61d3ab..3de56aacf18 100644 --- a/lib/gitlab/import_export/json/ndjson_reader.rb +++ b/lib/gitlab/import_export/json/ndjson_reader.rb @@ -17,14 +17,12 @@ module Gitlab Dir.exist?(@dir_path) end - # This can be removed once legacy_reader is deprecated. - def legacy? 
- false - end - def consume_attributes(importable_path) # This reads from `tree/project.json` path = file_path("#{importable_path}.json") + + raise Gitlab::ImportExport::Error, 'Invalid file' if !File.exist?(path) || File.symlink?(path) + data = File.read(path, MAX_JSON_DOCUMENT_SIZE) json_decode(data) end @@ -36,7 +34,7 @@ module Gitlab # This reads from `tree/project/merge_requests.ndjson` path = file_path(importable_path, "#{key}.ndjson") - next unless File.exist?(path) + next if !File.exist?(path) || File.symlink?(path) File.foreach(path, MAX_JSON_DOCUMENT_SIZE).with_index do |line, line_num| documents << [json_decode(line), line_num] diff --git a/lib/gitlab/import_export/project/import_export.yml b/lib/gitlab/import_export/project/import_export.yml index 335096faed6..56cbc5f1bb4 100644 --- a/lib/gitlab/import_export/project/import_export.yml +++ b/lib/gitlab/import_export/project/import_export.yml @@ -1195,6 +1195,9 @@ ee: - :milestone - lists: - :milestone + - approval_rules: + - :approval_project_rules_protected_branches + - :approval_project_rules_users included_attributes: issuable_sla: @@ -1260,9 +1263,30 @@ ee: - :description iterations_cadence: - :title + approval_rules: + - :approvals_required + - :name + - :rule_type + - :scanners + - :vulnerabilities_allowed + - :severity_levels + - :report_type + - :vulnerability_states + - :orchestration_policy_idx + - :applies_to_all_protected_branches + approval_project_rules_protected_branches: + - :protected_branch + approval_project_rules_users: + - :user_id excluded_attributes: project: - :vulnerability_hooks_integrations + approval_rules: + - :created_at + - :updated_at + methods: + approval_project_rules_protected_branches: + - :branch_name preloads: issues: epic: diff --git a/lib/gitlab/import_export/project/object_builder.rb b/lib/gitlab/import_export/project/object_builder.rb index 0962ad9f028..ac28ae6bfe0 100644 --- a/lib/gitlab/import_export/project/object_builder.rb +++ 
b/lib/gitlab/import_export/project/object_builder.rb @@ -60,7 +60,7 @@ module Gitlab def prepare_attributes attributes.dup.tap do |atts| - atts.delete('group') unless epic? || iteration? + atts.delete('group') unless group_level_object? if label? atts['type'] = 'ProjectLabel' # Always create project labels @@ -142,10 +142,6 @@ module Gitlab klass == MergeRequestDiffCommit end - def iteration? - klass == Iteration - end - # If an existing group milestone used the IID # claim the IID back and set the group milestone to use one available # This is necessary to fix situations like the following: @@ -164,7 +160,11 @@ module Gitlab end def group_relation_without_group? - (epic? || iteration?) && group.nil? + group_level_object? && group.nil? + end + + def group_level_object? + epic? end end end diff --git a/lib/gitlab/import_export/project/relation_factory.rb b/lib/gitlab/import_export/project/relation_factory.rb index ab95e306abf..9afa7cc1dae 100644 --- a/lib/gitlab/import_export/project/relation_factory.rb +++ b/lib/gitlab/import_export/project/relation_factory.rb @@ -92,6 +92,7 @@ module Gitlab when :'Ci::PipelineSchedule' then setup_pipeline_schedule when :'ProtectedBranch::MergeAccessLevel' then setup_protected_branch_access_level when :'ProtectedBranch::PushAccessLevel' then setup_protected_branch_access_level + when :ApprovalProjectRulesProtectedBranch then setup_merge_approval_protected_branch when :releases then setup_release end @@ -195,6 +196,13 @@ module Gitlab root_ancestor.max_member_access_for_user(@user) == Gitlab::Access::OWNER end + def setup_merge_approval_protected_branch + source_branch_name = @relation_hash.delete('branch_name') + target_branch = @importable.protected_branches.find_by(name: source_branch_name) + + @relation_hash['protected_branch'] = target_branch + end + def compute_relative_position return unless max_relative_position diff --git a/lib/gitlab/import_export/project/relation_tree_restorer.rb 
b/lib/gitlab/import_export/project/relation_tree_restorer.rb index 47196db6f8a..b5247754199 100644 --- a/lib/gitlab/import_export/project/relation_tree_restorer.rb +++ b/lib/gitlab/import_export/project/relation_tree_restorer.rb @@ -5,10 +5,14 @@ module Gitlab module Project class RelationTreeRestorer < ImportExport::Group::RelationTreeRestorer # Relations which cannot be saved at project level (and have a group assigned) - GROUP_MODELS = [GroupLabel, Milestone, Epic, Iteration].freeze + GROUP_MODELS = [GroupLabel, Milestone, Epic].freeze private + def group_models + GROUP_MODELS + end + def bulk_insert_enabled true end @@ -19,9 +23,11 @@ module Gitlab end def relation_invalid_for_importable?(relation_object) - GROUP_MODELS.include?(relation_object.class) && relation_object.group_id + group_models.include?(relation_object.class) && relation_object.group_id end end end end end + +Gitlab::ImportExport::Project::RelationTreeRestorer.prepend_mod diff --git a/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb b/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb index 034122a9f14..639f34980ff 100644 --- a/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb +++ b/lib/gitlab/import_export/project/sample/relation_tree_restorer.rb @@ -18,8 +18,6 @@ module Gitlab end def dates - return [] if @relation_reader.legacy? - RelationFactory::DATE_MODELS.flat_map do |tag| @relation_reader.consume_relation(@importable_path, tag, mark_as_consumed: false).map do |model| model.first['due_date'] diff --git a/lib/gitlab/import_export/project/tree_restorer.rb b/lib/gitlab/import_export/project/tree_restorer.rb index 47f82a901b7..e791424875a 100644 --- a/lib/gitlab/import_export/project/tree_restorer.rb +++ b/lib/gitlab/import_export/project/tree_restorer.rb @@ -17,7 +17,7 @@ module Gitlab end def restore - unless relation_reader + unless relation_reader.exist? 
raise Gitlab::ImportExport::Error, 'invalid import format' end @@ -47,28 +47,11 @@ module Gitlab private def relation_reader - strong_memoize(:relation_reader) do - [ndjson_relation_reader, legacy_relation_reader] - .compact.find(&:exist?) - end - end - - def ndjson_relation_reader - return unless Feature.enabled?(:project_import_ndjson, project.namespace) - - ImportExport::Json::NdjsonReader.new( + @relation_reader ||= ImportExport::Json::NdjsonReader.new( File.join(shared.export_path, 'tree') ) end - def legacy_relation_reader - ImportExport::Json::LegacyReader::File.new( - File.join(shared.export_path, 'project.json'), - relation_names: reader.project_relation_names, - allowed_path: importable_path - ) - end - def relation_tree_restorer @relation_tree_restorer ||= relation_tree_restorer_class.new( user: @user, diff --git a/lib/gitlab/import_export/project/tree_saver.rb b/lib/gitlab/import_export/project/tree_saver.rb index 05b96f7e8ce..fd5fa73764e 100644 --- a/lib/gitlab/import_export/project/tree_saver.rb +++ b/lib/gitlab/import_export/project/tree_saver.rb @@ -81,13 +81,10 @@ module Gitlab end def json_writer - @json_writer ||= if ::Feature.enabled?(:project_export_as_ndjson, @project.namespace) - full_path = File.join(@shared.export_path, 'tree') - Gitlab::ImportExport::Json::NdjsonWriter.new(full_path) - else - full_path = File.join(@shared.export_path, ImportExport.project_filename) - Gitlab::ImportExport::Json::LegacyWriter.new(full_path, allowed_path: 'project') - end + @json_writer ||= begin + full_path = File.join(@shared.export_path, 'tree') + Gitlab::ImportExport::Json::NdjsonWriter.new(full_path) + end end end end diff --git a/lib/gitlab/incoming_email.rb b/lib/gitlab/incoming_email.rb deleted file mode 100644 index d34c19bc9fc..00000000000 --- a/lib/gitlab/incoming_email.rb +++ /dev/null @@ -1,34 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module IncomingEmail - class << self - include Gitlab::Email::Common - - def config - 
incoming_email_config - end - - def key_from_address(address, wildcard_address: nil) - wildcard_address ||= config.address - regex = address_regex(wildcard_address) - return unless regex - - match = address.match(regex) - return unless match - - match[1] - end - - private - - def address_regex(wildcard_address) - return unless wildcard_address - - regex = Regexp.escape(wildcard_address) - regex = regex.sub(Regexp.escape(WILDCARD_PLACEHOLDER), '(.+)') - Regexp.new(/\A<?#{regex}>?\z/).freeze - end - end - end -end diff --git a/lib/gitlab/instrumentation/redis.rb b/lib/gitlab/instrumentation/redis.rb index a664656c467..590153ad9cd 100644 --- a/lib/gitlab/instrumentation/redis.rb +++ b/lib/gitlab/instrumentation/redis.rb @@ -19,8 +19,8 @@ module Gitlab end << ActionCable ).freeze - # Milliseconds represented in seconds (from 1 millisecond to 2 seconds). - QUERY_TIME_BUCKETS = [0.001, 0.0025, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2].freeze + # Milliseconds represented in seconds + QUERY_TIME_BUCKETS = [0.1, 0.25, 0.5].freeze class << self include ::Gitlab::Instrumentation::RedisPayload diff --git a/lib/gitlab/kubernetes/helm/api.rb b/lib/gitlab/kubernetes/helm/api.rb deleted file mode 100644 index ceda18442d6..00000000000 --- a/lib/gitlab/kubernetes/helm/api.rb +++ /dev/null @@ -1,126 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - class API - def initialize(kubeclient) - @kubeclient = kubeclient - @namespace = Gitlab::Kubernetes::Namespace.new( - Gitlab::Kubernetes::Helm::NAMESPACE, - kubeclient, - labels: Gitlab::Kubernetes::Helm::NAMESPACE_LABELS - ) - end - - def install(command) - namespace.ensure_exists! - - create_service_account(command) - create_cluster_role_binding(command) - create_config_map(command) - - delete_pod!(command.pod_name) - kubeclient.create_pod(command.pod_resource) - end - - alias_method :update, :install - - def uninstall(command) - namespace.ensure_exists! 
- create_config_map(command) - - delete_pod!(command.pod_name) - kubeclient.create_pod(command.pod_resource) - end - - ## - # Returns Pod phase - # - # https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-phase - # - # values: "Pending", "Running", "Succeeded", "Failed", "Unknown" - # - def status(pod_name) - kubeclient.get_pod(pod_name, namespace.name).status.phase - end - - def log(pod_name) - kubeclient.get_pod_log(pod_name, namespace.name).body - end - - def delete_pod!(pod_name) - kubeclient.delete_pod(pod_name, namespace.name) - rescue ::Kubeclient::ResourceNotFoundError - # no-op - end - - def get_config_map(config_map_name) - namespace.ensure_exists! - - kubeclient.get_config_map(config_map_name, namespace.name) - end - - private - - attr_reader :kubeclient, :namespace - - def create_config_map(command) - command.config_map_resource.tap do |config_map_resource| - break unless config_map_resource - - if config_map_exists?(config_map_resource) - kubeclient.update_config_map(config_map_resource) - else - kubeclient.create_config_map(config_map_resource) - end - end - end - - def update_config_map(command) - command.config_map_resource.tap do |config_map_resource| - kubeclient.update_config_map(config_map_resource) - end - end - - def create_service_account(command) - command.service_account_resource.tap do |service_account_resource| - break unless service_account_resource - - if service_account_exists?(service_account_resource) - kubeclient.update_service_account(service_account_resource) - else - kubeclient.create_service_account(service_account_resource) - end - end - end - - def create_cluster_role_binding(command) - command.cluster_role_binding_resource.tap do |cluster_role_binding_resource| - break unless cluster_role_binding_resource - - kubeclient.update_cluster_role_binding(cluster_role_binding_resource) - end - end - - def config_map_exists?(resource) - kubeclient.get_config_map(resource.metadata.name, resource.metadata.namespace) - 
rescue ::Kubeclient::ResourceNotFoundError - false - end - - def service_account_exists?(resource) - kubeclient.get_service_account(resource.metadata.name, resource.metadata.namespace) - rescue ::Kubeclient::ResourceNotFoundError - false - end - - def cluster_role_binding_exists?(resource) - kubeclient.get_cluster_role_binding(resource.metadata.name) - rescue ::Kubeclient::ResourceNotFoundError - false - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/pod.rb b/lib/gitlab/kubernetes/helm/pod.rb deleted file mode 100644 index 9d0207e6b1f..00000000000 --- a/lib/gitlab/kubernetes/helm/pod.rb +++ /dev/null @@ -1,82 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - class Pod - def initialize(command, namespace_name, service_account_name: nil) - @command = command - @namespace_name = namespace_name - @service_account_name = service_account_name - end - - def generate - spec = { containers: [container_specification], restartPolicy: 'Never' } - - spec[:volumes] = volumes_specification - spec[:containers][0][:volumeMounts] = volume_mounts_specification - spec[:serviceAccountName] = service_account_name if service_account_name - - ::Kubeclient::Resource.new(metadata: metadata, spec: spec) - end - - private - - attr_reader :command, :namespace_name, :service_account_name - - def container_specification - { - name: 'helm', - image: "registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/#{command.class::HELM_VERSION}-kube-#{Gitlab::Kubernetes::Helm::KUBECTL_VERSION}-alpine-3.12", - env: generate_pod_env(command), - command: %w(/bin/sh), - args: %w(-c $(COMMAND_SCRIPT)) - } - end - - def labels - { - 'gitlab.org/action': 'install', - 'gitlab.org/application': command.name - } - end - - def metadata - { - name: command.pod_name, - namespace: namespace_name, - labels: labels - } - end - - def generate_pod_env(command) - command.env.merge( - HELM_VERSION: command.class::HELM_VERSION, - 
COMMAND_SCRIPT: command.generate_script - ).map { |key, value| { name: key, value: value } } - end - - def volumes_specification - [ - { - name: 'configuration-volume', - configMap: { - name: "values-content-configuration-#{command.name}", - items: command.file_names.map { |name| { key: name, path: name } } - } - } - ] - end - - def volume_mounts_specification - [ - { - name: 'configuration-volume', - mountPath: "/data/helm/#{command.name}/config" - } - ] - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/base_command.rb b/lib/gitlab/kubernetes/helm/v2/base_command.rb deleted file mode 100644 index 26c77b2149e..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/base_command.rb +++ /dev/null @@ -1,93 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V2 - class BaseCommand - attr_reader :name, :files - - HELM_VERSION = '2.17.0' - - def initialize(rbac:, name:, files:) - @rbac = rbac - @name = name - @files = files - end - - def env - { TILLER_NAMESPACE: namespace } - end - - def rbac? - @rbac - end - - def pod_resource - pod_service_account_name = rbac? ? service_account_name : nil - - Gitlab::Kubernetes::Helm::Pod.new(self, namespace, service_account_name: pod_service_account_name).generate - end - - def generate_script - <<~HEREDOC - set -xeo pipefail - HEREDOC - end - - def pod_name - "install-#{name}" - end - - def config_map_resource - Gitlab::Kubernetes::ConfigMap.new(name, files).generate - end - - def service_account_resource - return unless rbac? - - Gitlab::Kubernetes::ServiceAccount.new(service_account_name, namespace).generate - end - - def cluster_role_binding_resource - return unless rbac? 
- - subjects = [{ kind: 'ServiceAccount', name: service_account_name, namespace: namespace }] - - Gitlab::Kubernetes::ClusterRoleBinding.new( - cluster_role_binding_name, - cluster_role_name, - subjects - ).generate - end - - def file_names - files.keys - end - - private - - def files_dir - "/data/helm/#{name}/config" - end - - def namespace - Gitlab::Kubernetes::Helm::NAMESPACE - end - - def service_account_name - Gitlab::Kubernetes::Helm::SERVICE_ACCOUNT - end - - def cluster_role_binding_name - Gitlab::Kubernetes::Helm::CLUSTER_ROLE_BINDING - end - - def cluster_role_name - Gitlab::Kubernetes::Helm::CLUSTER_ROLE - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/certificate.rb b/lib/gitlab/kubernetes/helm/v2/certificate.rb deleted file mode 100644 index 17ea2eb5188..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/certificate.rb +++ /dev/null @@ -1,75 +0,0 @@ -# frozen_string_literal: true -module Gitlab - module Kubernetes - module Helm - module V2 - class Certificate - INFINITE_EXPIRY = 1000.years - SHORT_EXPIRY = 30.minutes - - attr_reader :key, :cert - - def key_string - @key.to_s - end - - def cert_string - @cert.to_pem - end - - def self.from_strings(key_string, cert_string) - key = OpenSSL::PKey::RSA.new(key_string) - cert = OpenSSL::X509::Certificate.new(cert_string) - new(key, cert) - end - - def self.generate_root - _issue(signed_by: nil, expires_in: INFINITE_EXPIRY, certificate_authority: true) - end - - def issue(expires_in: SHORT_EXPIRY) - self.class._issue(signed_by: self, expires_in: expires_in, certificate_authority: false) - end - - private - - def self._issue(signed_by:, expires_in:, certificate_authority:) - key = OpenSSL::PKey::RSA.new(4096) - public_key = key.public_key - - subject = OpenSSL::X509::Name.parse("/C=US") - - cert = OpenSSL::X509::Certificate.new - cert.subject = subject - - cert.issuer = signed_by&.cert&.subject || subject - - cert.not_before = Time.now.utc - cert.not_after = expires_in.from_now.utc - 
cert.public_key = public_key - cert.serial = 0x0 - cert.version = 2 - - if certificate_authority - extension_factory = OpenSSL::X509::ExtensionFactory.new - extension_factory.subject_certificate = cert - extension_factory.issuer_certificate = cert - cert.add_extension(extension_factory.create_extension('subjectKeyIdentifier', 'hash')) - cert.add_extension(extension_factory.create_extension('basicConstraints', 'CA:TRUE', true)) - cert.add_extension(extension_factory.create_extension('keyUsage', 'cRLSign,keyCertSign', true)) - end - - cert.sign(signed_by&.key || key, OpenSSL::Digest.new('SHA256')) - - new(key, cert) - end - - def initialize(key, cert) - @key = key - @cert = cert - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/client_command.rb b/lib/gitlab/kubernetes/helm/v2/client_command.rb deleted file mode 100644 index 8b15af9aeea..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/client_command.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V2 - module ClientCommand - def init_command - <<~SHELL.chomp - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - SHELL - end - - def repository_command - ['helm', 'repo', 'add', name, repository].shelljoin if repository - end - - private - - def repository_update_command - 'helm repo update' - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/delete_command.rb b/lib/gitlab/kubernetes/helm/v2/delete_command.rb deleted file mode 100644 index 4d52fc1398f..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/delete_command.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V2 - class DeleteCommand < BaseCommand - include ClientCommand - - attr_reader :predelete, :postdelete - - def initialize(predelete: nil, postdelete: nil, **args) - super(**args) - @predelete = 
predelete - @postdelete = postdelete - end - - def generate_script - super + [ - init_command, - predelete, - delete_command, - postdelete - ].compact.join("\n") - end - - def pod_name - "uninstall-#{name}" - end - - def delete_command - ['helm', 'delete', '--purge', name].shelljoin - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/init_command.rb b/lib/gitlab/kubernetes/helm/v2/init_command.rb deleted file mode 100644 index f8b52feb5b6..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/init_command.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V2 - class InitCommand < BaseCommand - def generate_script - super + [ - init_helm_command - ].join("\n") - end - - private - - def init_helm_command - command = %w[helm init] + init_command_flags - - command.shelljoin - end - - def init_command_flags - tls_flags + optional_service_account_flag - end - - def tls_flags - [ - '--tiller-tls', - '--tiller-tls-verify', - '--tls-ca-cert', "#{files_dir}/ca.pem", - '--tiller-tls-cert', "#{files_dir}/cert.pem", - '--tiller-tls-key', "#{files_dir}/key.pem" - ] - end - - def optional_service_account_flag - return [] unless rbac? 
- - ['--service-account', service_account_name] - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/install_command.rb b/lib/gitlab/kubernetes/helm/v2/install_command.rb deleted file mode 100644 index c50db6bf177..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/install_command.rb +++ /dev/null @@ -1,87 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V2 - class InstallCommand < BaseCommand - include ClientCommand - - attr_reader :chart, :repository, :preinstall, :postinstall - attr_accessor :version - - def initialize(chart:, version: nil, repository: nil, preinstall: nil, postinstall: nil, **args) - super(**args) - @chart = chart - @version = version - @repository = repository - @preinstall = preinstall - @postinstall = postinstall - end - - def generate_script - super + [ - init_command, - repository_command, - repository_update_command, - preinstall, - install_command, - postinstall - ].compact.join("\n") - end - - private - - # Uses `helm upgrade --install` which means we can use this for both - # installation and uprade of applications - def install_command - command = ['helm', 'upgrade', name, chart] + - install_flag + - rollback_support_flag + - reset_values_flag + - optional_version_flag + - rbac_create_flag + - namespace_flag + - value_flag - - command.shelljoin - end - - def install_flag - ['--install'] - end - - def reset_values_flag - ['--reset-values'] - end - - def value_flag - ['-f', "/data/helm/#{name}/config/values.yaml"] - end - - def namespace_flag - ['--namespace', Gitlab::Kubernetes::Helm::NAMESPACE] - end - - def rbac_create_flag - if rbac? 
- %w[--set rbac.create=true,rbac.enabled=true] - else - %w[--set rbac.create=false,rbac.enabled=false] - end - end - - def optional_version_flag - return [] unless version - - ['--version', version] - end - - def rollback_support_flag - ['--atomic', '--cleanup-on-fail'] - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/patch_command.rb b/lib/gitlab/kubernetes/helm/v2/patch_command.rb deleted file mode 100644 index 40e56771e47..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/patch_command.rb +++ /dev/null @@ -1,67 +0,0 @@ -# frozen_string_literal: true - -# PatchCommand is for updating values in installed charts without overwriting -# existing values. -module Gitlab - module Kubernetes - module Helm - module V2 - class PatchCommand < BaseCommand - include ClientCommand - - attr_reader :chart, :repository - attr_accessor :version - - def initialize(chart:, version:, repository: nil, **args) - super(**args) - - # version is mandatory to prevent chart mismatches - # we do not want our values interpreted in the context of the wrong version - raise ArgumentError, 'version is required' if version.blank? 
- - @chart = chart - @version = version - @repository = repository - end - - def generate_script - super + [ - init_command, - repository_command, - repository_update_command, - upgrade_command - ].compact.join("\n") - end - - private - - def upgrade_command - command = ['helm', 'upgrade', name, chart] + - reuse_values_flag + - version_flag + - namespace_flag + - value_flag - - command.shelljoin - end - - def reuse_values_flag - ['--reuse-values'] - end - - def value_flag - ['-f', "/data/helm/#{name}/config/values.yaml"] - end - - def namespace_flag - ['--namespace', Gitlab::Kubernetes::Helm::NAMESPACE] - end - - def version_flag - ['--version', version] - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v2/reset_command.rb b/lib/gitlab/kubernetes/helm/v2/reset_command.rb deleted file mode 100644 index 00626501a9a..00000000000 --- a/lib/gitlab/kubernetes/helm/v2/reset_command.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V2 - class ResetCommand < BaseCommand - include ClientCommand - - def generate_script - super + [ - init_command, - reset_helm_command - ].join("\n") - end - - def pod_name - "uninstall-#{name}" - end - - private - - def reset_helm_command - 'helm reset --force' - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v3/base_command.rb b/lib/gitlab/kubernetes/helm/v3/base_command.rb deleted file mode 100644 index ca1bf5462f0..00000000000 --- a/lib/gitlab/kubernetes/helm/v3/base_command.rb +++ /dev/null @@ -1,101 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V3 - class BaseCommand - attr_reader :name, :files - - HELM_VERSION = '3.2.4' - - def initialize(rbac:, name:, files:) - @rbac = rbac - @name = name - @files = files - end - - def env - {} - end - - def rbac? - @rbac - end - - def pod_resource - pod_service_account_name = rbac? ? 
service_account_name : nil - - Gitlab::Kubernetes::Helm::Pod.new(self, namespace, service_account_name: pod_service_account_name).generate - end - - def generate_script - <<~HEREDOC - set -xeo pipefail - HEREDOC - end - - def pod_name - "install-#{name}" - end - - def config_map_resource - Gitlab::Kubernetes::ConfigMap.new(name, files).generate - end - - def service_account_resource - return unless rbac? - - Gitlab::Kubernetes::ServiceAccount.new(service_account_name, namespace).generate - end - - def cluster_role_binding_resource - return unless rbac? - - subjects = [{ kind: 'ServiceAccount', name: service_account_name, namespace: namespace }] - - Gitlab::Kubernetes::ClusterRoleBinding.new( - cluster_role_binding_name, - cluster_role_name, - subjects - ).generate - end - - def file_names - files.keys - end - - def repository_command - ['helm', 'repo', 'add', name, repository].shelljoin if repository - end - - private - - def repository_update_command - 'helm repo update' - end - - def namespace_flag - ['--namespace', Gitlab::Kubernetes::Helm::NAMESPACE] - end - - def namespace - Gitlab::Kubernetes::Helm::NAMESPACE - end - - def service_account_name - Gitlab::Kubernetes::Helm::SERVICE_ACCOUNT - end - - def cluster_role_binding_name - Gitlab::Kubernetes::Helm::CLUSTER_ROLE_BINDING - end - - def cluster_role_name - Gitlab::Kubernetes::Helm::CLUSTER_ROLE - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v3/delete_command.rb b/lib/gitlab/kubernetes/helm/v3/delete_command.rb deleted file mode 100644 index f628e852f54..00000000000 --- a/lib/gitlab/kubernetes/helm/v3/delete_command.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V3 - class DeleteCommand < BaseCommand - attr_reader :predelete, :postdelete - - def initialize(predelete: nil, postdelete: nil, **args) - super(**args) - @predelete = predelete - @postdelete = postdelete - end - - def generate_script - super + [ 
- predelete, - delete_command, - postdelete - ].compact.join("\n") - end - - def pod_name - "uninstall-#{name}" - end - - def delete_command - ['helm', 'uninstall', name, *namespace_flag].shelljoin - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v3/install_command.rb b/lib/gitlab/kubernetes/helm/v3/install_command.rb deleted file mode 100644 index 8d521f0dcd4..00000000000 --- a/lib/gitlab/kubernetes/helm/v3/install_command.rb +++ /dev/null @@ -1,80 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Kubernetes - module Helm - module V3 - class InstallCommand < BaseCommand - attr_reader :chart, :repository, :preinstall, :postinstall - attr_accessor :version - - def initialize(chart:, version: nil, repository: nil, preinstall: nil, postinstall: nil, **args) - super(**args) - @chart = chart - @version = version - @repository = repository - @preinstall = preinstall - @postinstall = postinstall - end - - def generate_script - super + [ - repository_command, - repository_update_command, - preinstall, - install_command, - postinstall - ].compact.join("\n") - end - - private - - # Uses `helm upgrade --install` which means we can use this for both - # installation and uprade of applications - def install_command - command = ['helm', 'upgrade', name, chart] + - install_flag + - rollback_support_flag + - reset_values_flag + - optional_version_flag + - rbac_create_flag + - namespace_flag + - value_flag - - command.shelljoin - end - - def install_flag - ['--install'] - end - - def reset_values_flag - ['--reset-values'] - end - - def value_flag - ['-f', "/data/helm/#{name}/config/values.yaml"] - end - - def rbac_create_flag - if rbac? 
- %w[--set rbac.create=true,rbac.enabled=true] - else - %w[--set rbac.create=false,rbac.enabled=false] - end - end - - def optional_version_flag - return [] unless version - - ['--version', version] - end - - def rollback_support_flag - ['--atomic', '--cleanup-on-fail'] - end - end - end - end - end -end diff --git a/lib/gitlab/kubernetes/helm/v3/patch_command.rb b/lib/gitlab/kubernetes/helm/v3/patch_command.rb deleted file mode 100644 index 1278e524bd2..00000000000 --- a/lib/gitlab/kubernetes/helm/v3/patch_command.rb +++ /dev/null @@ -1,60 +0,0 @@ -# frozen_string_literal: true - -# PatchCommand is for updating values in installed charts without overwriting -# existing values. -module Gitlab - module Kubernetes - module Helm - module V3 - class PatchCommand < BaseCommand - attr_reader :chart, :repository - attr_accessor :version - - def initialize(chart:, version:, repository: nil, **args) - super(**args) - - # version is mandatory to prevent chart mismatches - # we do not want our values interpreted in the context of the wrong version - raise ArgumentError, 'version is required' if version.blank? 
- - @chart = chart - @version = version - @repository = repository - end - - def generate_script - super + [ - repository_command, - repository_update_command, - upgrade_command - ].compact.join("\n") - end - - private - - def upgrade_command - command = ['helm', 'upgrade', name, chart] + - reuse_values_flag + - version_flag + - namespace_flag + - value_flag - - command.shelljoin - end - - def reuse_values_flag - ['--reuse-values'] - end - - def value_flag - ['-f', "/data/helm/#{name}/config/values.yaml"] - end - - def version_flag - ['--version', version] - end - end - end - end - end -end diff --git a/lib/gitlab/legacy_github_import/user_formatter.rb b/lib/gitlab/legacy_github_import/user_formatter.rb index d45a166d2b7..8fd8354e59c 100644 --- a/lib/gitlab/legacy_github_import/user_formatter.rb +++ b/lib/gitlab/legacy_github_import/user_formatter.rb @@ -5,6 +5,8 @@ module Gitlab class UserFormatter attr_reader :client, :raw + GITEA_GHOST_EMAIL = 'ghost_user@gitea_import_dummy_email.com' + def initialize(client, raw) @client = client @raw = raw @@ -27,7 +29,14 @@ module Gitlab private def email - @email ||= client.user(raw[:login]).to_h[:email] + # Gitea marks deleted users as 'Ghost' users and removes them from + # their system. So for Gitea 'Ghost' users we need to assign a dummy + # email address to avoid querying the Gitea api for a non existing user + if raw[:login] == 'Ghost' && raw[:id] == -1 + @email = GITEA_GHOST_EMAIL + else + @email ||= client.user(raw[:login]).to_h[:email] + end end def find_by_email diff --git a/lib/gitlab/mail_room.rb b/lib/gitlab/mail_room.rb index bad2e265f73..5f760e764c8 100644 --- a/lib/gitlab/mail_room.rb +++ b/lib/gitlab/mail_room.rb @@ -11,6 +11,20 @@ require_relative 'redis/queues' unless defined?(Gitlab::Redis::Queues) # This service is run independently of the main Rails process, # therefore the `Rails` class and its methods are unavailable. 
+# TODO: Remove this once we're on Ruby 3 +# https://gitlab.com/gitlab-org/gitlab/-/issues/393651 +unless YAML.respond_to?(:safe_load_file) + module YAML + # Temporary Ruby 2 back-compat workaround. + # + # This method only exists as of stdlib 3.0.0: + # https://ruby-doc.org/stdlib-3.0.0/libdoc/psych/rdoc/Psych.html + def self.safe_load_file(path, **options) + YAML.safe_load(File.read(path), **options) + end + end +end + module Gitlab module MailRoom RAILS_ROOT_DIR = Pathname.new('../..').expand_path(__dir__).freeze @@ -129,7 +143,7 @@ module Gitlab end def load_yaml - @yaml ||= YAML.load_file(config_file)[rails_env].deep_symbolize_keys + @yaml ||= YAML.safe_load_file(config_file, aliases: true)[rails_env].deep_symbolize_keys end def application_secrets_file diff --git a/lib/gitlab/metrics/subscribers/action_cable.rb b/lib/gitlab/metrics/subscribers/action_cable.rb index 9f955dfe79f..50d843cc72f 100644 --- a/lib/gitlab/metrics/subscribers/action_cable.rb +++ b/lib/gitlab/metrics/subscribers/action_cable.rb @@ -6,6 +6,10 @@ module Gitlab class ActionCable < ActiveSupport::Subscriber include Gitlab::Utils::StrongMemoize + BROADCASTING_GRAPHQL_EVENT = 'graphql-event' + BROADCASTING_GRAPHQL_SUBSCRIPTION = 'graphql-subscription' + BROADCASTING_OTHER = 'other' + attach_to :action_cable SINGLE_CLIENT_TRANSMISSION = :action_cable_single_client_transmissions_total @@ -35,11 +39,25 @@ module Gitlab end def broadcast(event) - broadcast_counter.increment + broadcast_counter.increment({ broadcasting: broadcasting_from(event.payload) }) end private + # Since broadcastings can have high dimensionality when they carry IDs, we need to + # collapse them. If it's not a well-know broadcast, we report it as "other". + def broadcasting_from(payload) + broadcasting = payload[:broadcasting] + if broadcasting.start_with?(BROADCASTING_GRAPHQL_EVENT) + # Take at most two levels of topic namespacing. 
+ broadcasting.split(':').reject(&:empty?).take(2).join(':') # rubocop: disable CodeReuse/ActiveRecord + elsif broadcasting.start_with?(BROADCASTING_GRAPHQL_SUBSCRIPTION) + BROADCASTING_GRAPHQL_SUBSCRIPTION + else + BROADCASTING_OTHER + end + end + # When possible tries to query operation name def operation_name_from(payload) data = payload.dig(:data, 'result', 'data') || {} diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb index e3756a8c9f6..10bb358a292 100644 --- a/lib/gitlab/metrics/subscribers/active_record.rb +++ b/lib/gitlab/metrics/subscribers/active_record.rb @@ -9,7 +9,6 @@ module Gitlab attach_to :active_record - IGNORABLE_SQL = %w{BEGIN COMMIT}.freeze DB_COUNTERS = %i{count write_count cached_count}.freeze SQL_COMMANDS_WITH_COMMENTS_REGEX = %r{\A(/\*.*\*/\s)?((?!(.*[^\w'"](DELETE|UPDATE|INSERT INTO)[^\w'"])))(WITH.*)?(SELECT)((?!(FOR UPDATE|FOR SHARE)).)*$}i.freeze @@ -114,7 +113,7 @@ module Gitlab end def ignored_query?(payload) - payload[:name] == 'SCHEMA' || IGNORABLE_SQL.include?(payload[:sql]) + payload[:name] == 'SCHEMA' || payload[:name] == 'TRANSACTION' end def cached_query?(payload) diff --git a/lib/gitlab/metrics/subscribers/external_http.rb b/lib/gitlab/metrics/subscribers/external_http.rb index ff8654a2cec..87756b14887 100644 --- a/lib/gitlab/metrics/subscribers/external_http.rb +++ b/lib/gitlab/metrics/subscribers/external_http.rb @@ -13,6 +13,10 @@ module Gitlab DETAIL_STORE = :external_http_detail_store COUNTER = :external_http_count DURATION = :external_http_duration_s + SLOW_REQUESTS = :external_http_slow_requests + + THRESHOLD_SLOW_REQUEST_S = 5.0 + MAX_SLOW_REQUESTS = 10 def self.detail_store ::Gitlab::SafeRequestStore[DETAIL_STORE] ||= [] @@ -26,11 +30,24 @@ module Gitlab Gitlab::SafeRequestStore[COUNTER].to_i end + def self.slow_requests + Gitlab::SafeRequestStore[SLOW_REQUESTS] + end + + def self.top_slowest_requests + requests = slow_requests + + return unless 
requests.present? + + requests.sort_by { |req| req[:duration_s] }.reverse.first(MAX_SLOW_REQUESTS) + end + def self.payload { COUNTER => request_count, - DURATION => duration - } + DURATION => duration, + SLOW_REQUESTS => top_slowest_requests + }.compact end def request(event) @@ -69,6 +86,17 @@ module Gitlab Gitlab::SafeRequestStore[COUNTER] = Gitlab::SafeRequestStore[COUNTER].to_i + 1 Gitlab::SafeRequestStore[DURATION] = Gitlab::SafeRequestStore[DURATION].to_f + payload[:duration].to_f + + if payload[:duration].to_f > THRESHOLD_SLOW_REQUEST_S + Gitlab::SafeRequestStore[SLOW_REQUESTS] ||= [] + Gitlab::SafeRequestStore[SLOW_REQUESTS] << { + method: payload[:method], + host: payload[:host], + port: payload[:port], + path: payload[:path], + duration_s: payload[:duration].to_f.round(3) + } + end end def expose_metrics(payload) diff --git a/lib/gitlab/metrics/subscribers/rails_cache.rb b/lib/gitlab/metrics/subscribers/rails_cache.rb index d2b6d0e3c14..1759c0544b1 100644 --- a/lib/gitlab/metrics/subscribers/rails_cache.rb +++ b/lib/gitlab/metrics/subscribers/rails_cache.rb @@ -84,7 +84,7 @@ module Gitlab :gitlab_cache_operation_duration_seconds, 'Cache access time', {}, - [0.00001, 0.0001, 0.001, 0.01, 0.1, 1.0] + Gitlab::Instrumentation::Redis::QUERY_TIME_BUCKETS ) end end diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb index 13f7ab36823..4da5fef9fd7 100644 --- a/lib/gitlab/middleware/go.rb +++ b/lib/gitlab/middleware/go.rb @@ -18,7 +18,7 @@ module Gitlab request = ActionDispatch::Request.new(env) render_go_doc(request) || @app.call(env) - rescue Gitlab::Auth::IpBlacklisted + rescue Gitlab::Auth::IpBlocked Gitlab::AuthLogger.error( message: 'Rack_Attack', status: 403, diff --git a/lib/gitlab/octokit/middleware.rb b/lib/gitlab/octokit/middleware.rb index a92860f7eb8..f944f9827a3 100644 --- a/lib/gitlab/octokit/middleware.rb +++ b/lib/gitlab/octokit/middleware.rb @@ -11,7 +11,8 @@ module Gitlab Gitlab::UrlBlocker.validate!(env[:url], schemes: 
%w[http https], allow_localhost: allow_local_requests?, - allow_local_network: allow_local_requests? + allow_local_network: allow_local_requests?, + dns_rebind_protection: dns_rebind_protection? ) @app.call(env) @@ -19,6 +20,10 @@ module Gitlab private + def dns_rebind_protection? + Gitlab::CurrentSettings.dns_rebinding_protection_enabled? + end + def allow_local_requests? Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services? end diff --git a/lib/gitlab/pages/deployment_update.rb b/lib/gitlab/pages/deployment_update.rb index 2f5c6938e2a..6845f5d88ec 100644 --- a/lib/gitlab/pages/deployment_update.rb +++ b/lib/gitlab/pages/deployment_update.rb @@ -46,9 +46,13 @@ module Gitlab end end + def root_dir + build.options[:publish] || PUBLIC_DIR + end + # Calculate page size after extract def total_size - @total_size ||= build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true).total_size + @total_size ||= build.artifacts_metadata_entry("#{root_dir}/", recursive: true).total_size end def max_size_from_settings @@ -74,7 +78,10 @@ module Gitlab def validate_public_folder if total_size <= 0 - errors.add(:base, 'Error: The `public/` folder is missing, or not declared in `.gitlab-ci.yml`.') + errors.add( + :base, + 'Error: You need to either include a `public/` folder in your artifacts, or specify ' \ + 'which one to use for Pages using `publish` in `.gitlab-ci.yml`') end end diff --git a/lib/gitlab/pages/virtual_host_finder.rb b/lib/gitlab/pages/virtual_host_finder.rb index 87fbf547770..5fec60188f8 100644 --- a/lib/gitlab/pages/virtual_host_finder.rb +++ b/lib/gitlab/pages/virtual_host_finder.rb @@ -27,10 +27,9 @@ module Gitlab attr_reader :host def by_unique_domain(name) - return unless Feature.enabled?(:pages_unique_domain) - project = Project.by_pages_enabled_unique_domain(name) + return unless Feature.enabled?(:pages_unique_domain, project) return unless project&.pages_deployed? 
::Pages::VirtualDomain.new(projects: [project]) diff --git a/lib/gitlab/pagination/keyset.rb b/lib/gitlab/pagination/keyset.rb index 67a5530d46c..56017ba846c 100644 --- a/lib/gitlab/pagination/keyset.rb +++ b/lib/gitlab/pagination/keyset.rb @@ -3,12 +3,12 @@ module Gitlab module Pagination module Keyset - SUPPORTED_TYPES = [ + SUPPORTED_TYPES = %w[ Project ].freeze def self.available_for_type?(relation) - SUPPORTED_TYPES.include?(relation.klass) + SUPPORTED_TYPES.include?(relation.klass.to_s) end def self.available?(request_context, relation) diff --git a/lib/gitlab/project_template.rb b/lib/gitlab/project_template.rb index 5394cd115b1..485deb0fb19 100644 --- a/lib/gitlab/project_template.rb +++ b/lib/gitlab/project_template.rb @@ -81,8 +81,9 @@ module Gitlab ProjectTemplate.new('jsonnet', 'Jsonnet for Dynamic Child Pipelines', _('An example showing how to use Jsonnet with GitLab dynamic child pipelines'), 'https://gitlab.com/gitlab-org/project-templates/jsonnet'), ProjectTemplate.new('cluster_management', 'GitLab Cluster Management', _('An example project for managing Kubernetes clusters integrated with GitLab'), 'https://gitlab.com/gitlab-org/project-templates/cluster-management'), ProjectTemplate.new('kotlin_native_linux', 'Kotlin Native Linux', _('A basic template for developing Linux programs using Kotlin Native'), 'https://gitlab.com/gitlab-org/project-templates/kotlin-native-linux'), - ProjectTemplate.new('typo3_distribution', 'TYPO3 Distribution', _('A template for starting a new TYPO3 project'), 'https://gitlab.com/gitlab-org/project-templates/typo3-distribution', 'illustrations/logos/typo3.svg') - ].freeze + ProjectTemplate.new('typo3_distribution', 'TYPO3 Distribution', _('A template for starting a new TYPO3 project'), 'https://gitlab.com/gitlab-org/project-templates/typo3-distribution', 'illustrations/logos/typo3.svg'), + ProjectTemplate.new('laravel', 'Laravel Framework', _('A basic folder structure of a Laravel application, to help you get started.'), 
'https://gitlab.com/gitlab-org/project-templates/laravel', 'illustrations/logos/laravel.svg') + ] end # rubocop:enable Metrics/AbcSize diff --git a/lib/gitlab/quick_actions/issue_actions.rb b/lib/gitlab/quick_actions/issue_actions.rb index ae8bc102f57..10e8c702826 100644 --- a/lib/gitlab/quick_actions/issue_actions.rb +++ b/lib/gitlab/quick_actions/issue_actions.rb @@ -259,7 +259,8 @@ module Gitlab current_user.can?(:"set_#{quick_action_target.issue_type}_metadata", quick_action_target) end command :promote_to_incident do - @updates[:issue_type] = "incident" + @updates[:issue_type] = :incident + @updates[:work_item_type] = ::WorkItems::Type.default_by_type(:incident) end desc { _('Add customer relation contacts') } diff --git a/lib/gitlab/reference_extractor.rb b/lib/gitlab/reference_extractor.rb index 540394f04bd..783b68fac12 100644 --- a/lib/gitlab/reference_extractor.rb +++ b/lib/gitlab/reference_extractor.rb @@ -4,7 +4,7 @@ module Gitlab # Extract possible GFM references from an arbitrary String for further processing. 
class ReferenceExtractor < Banzai::ReferenceExtractor REFERABLES = %i(user issue label milestone mentioned_user mentioned_group mentioned_project - merge_request snippet commit commit_range directly_addressed_user epic iteration vulnerability + merge_request snippet commit commit_range directly_addressed_user epic vulnerability alert).freeze attr_accessor :project, :current_user, :author @@ -64,18 +64,24 @@ module Gitlab end def all - REFERABLES.each { |referable| send(referable.to_s.pluralize) } # rubocop:disable GitlabSecurity/PublicSend + self.class.referrables.each { |referable| send(referable.to_s.pluralize) } # rubocop:disable GitlabSecurity/PublicSend @references.values.flatten end - def self.references_pattern - return @pattern if @pattern + class << self + def references_pattern + return @pattern if @pattern - patterns = REFERABLES.map do |type| - Banzai::ReferenceParser[type].reference_class.try(:reference_pattern) - end.uniq + patterns = referrables.map do |type| + Banzai::ReferenceParser[type].reference_class.try(:reference_pattern) + end.uniq - @pattern = Regexp.union(patterns.compact) + @pattern = Regexp.union(patterns.compact) + end + + def referrables + @referrables ||= REFERABLES + end end private @@ -90,3 +96,5 @@ module Gitlab end end end + +Gitlab::ReferenceExtractor.prepend_mod diff --git a/lib/gitlab/regex.rb b/lib/gitlab/regex.rb index de6eba9b9c9..3640edbaa26 100644 --- a/lib/gitlab/regex.rb +++ b/lib/gitlab/regex.rb @@ -258,38 +258,45 @@ module Gitlab end end - extend self - extend Packages + module BulkImports + def bulk_import_destination_namespace_path_regex + # This regexp validates the string conforms to rules for a destination_namespace path: + # i.e does not start with a non-alphanumeric character, + # contains only alphanumeric characters, forward slashes, periods, and underscores, + # does not end with a period or forward slash, and has a relative path structure + # with no http protocol chars or leading or trailing forward slashes 
+ # eg 'source/full/path' or 'destination_namespace' not 'https://example.com/destination/namespace/path' + # the regex also allows for an empty string ('') to be accepted as this is allowed in + # a bulk_import POST request + @bulk_import_destination_namespace_path_regex ||= %r/((\A\z)|(\A[0-9a-z]*(-_.)?[0-9a-z])(\/?[0-9a-z]*[-_.]?[0-9a-z])+\z)/i + end - def bulk_import_destination_namespace_path_regex - # This regexp validates the string conforms to rules for a destination_namespace path: - # i.e does not start with a non-alphanumeric character except for periods or underscores, - # contains only alphanumeric characters, forward slashes, periods, and underscores, - # does not end with a period or forward slash, and has a relative path structure - # with no http protocol chars or leading or trailing forward slashes - # eg 'source/full/path' or 'destination_namespace' not 'https://example.com/destination/namespace/path' - # the regex also allows for an empty string ('') to be accepted as this is allowed in - # a bulk_import POST request - @bulk_import_destination_namespace_path_regex ||= %r/((\A\z)|\A([.]?)\w*([0-9a-z][-_]*)(\/?[.]?[0-9a-z][-_]*)+\z)/i - end + def bulk_import_source_full_path_regex + # This regexp validates the string conforms to rules for a source_full_path path: + # i.e does not start with a non-alphanumeric character except for periods or underscores, + # contains only alphanumeric characters, forward slashes, periods, and underscores, + # does not end with a period or forward slash, and has a relative path structure + # with no http protocol chars or leading or trailing forward slashes + # eg 'source/full/path' or 'destination_namespace' not 'https://example.com/source/full/path' + @bulk_import_source_full_path_regex ||= %r/\A([.]?)[^\W](\/?([-_.+]*)*[0-9a-z][-_]*)+\z/i + end - def bulk_import_source_full_path_regex - # This regexp validates the string conforms to rules for a source_full_path path: - # i.e does not start with a non-alphanumeric 
character except for periods or underscores, - # contains only alphanumeric characters, forward slashes, periods, and underscores, - # does not end with a period or forward slash, and has a relative path structure - # with no http protocol chars or leading or trailing forward slashes - # eg 'source/full/path' or 'destination_namespace' not 'https://example.com/source/full/path' - @bulk_import_source_full_path_regex ||= %r/\A([.]?)[^\W](\/?[.]?[0-9a-z][-_]*)+\z/i - end + def bulk_import_source_full_path_regex_message + bulk_import_destination_namespace_path_regex_message + end - def bulk_import_destination_namespace_path_regex_message - "cannot start with a non-alphanumeric character except for periods or underscores, " \ - "can contain only alphanumeric characters, forward slashes, periods, and underscores, " \ - "cannot end with a period or forward slash, and has a relative path structure " \ - "with no http protocol chars or leading or trailing forward slashes" \ + def bulk_import_destination_namespace_path_regex_message + "must have a relative path structure " \ + "with no HTTP protocol characters, or leading or trailing forward slashes. " \ + "Path segments must not start or end with a special character, " \ + "and must not contain consecutive special characters." + end end + extend self + extend Packages + extend BulkImports + def group_path_regex # This regexp validates the string conforms to rules for a group slug: # i.e does not start with a non-alphanumeric character except for periods or underscores, @@ -302,7 +309,7 @@ module Gitlab def group_path_regex_message "cannot start with a non-alphanumeric character except for periods or underscores, " \ "can contain only alphanumeric characters, periods, and underscores, " \ - "cannot end with a period or forward slash, and has no leading or trailing forward slashes" \ + "cannot end with a period or forward slash, and has no leading or trailing forward slashes." 
\ end def project_name_regex @@ -459,7 +466,7 @@ module Gitlab # ``` MARKDOWN_CODE_BLOCK_REGEX_UNTRUSTED = '(?P<code>' \ - '^```\n' \ + '^```.*?\n' \ '(?:\n|.)*?' \ '\n```\ *$' \ ')'.freeze @@ -477,6 +484,17 @@ module Gitlab ) }mx.freeze + # HTML block: + # <tag> + # Anything, including `>>>` blocks which are ignored by this filter + # </tag> + MARKDOWN_HTML_BLOCK_REGEX_UNTRUSTED = + '(?P<html>' \ + '^<[^>]+?>\ *\n' \ + '(?:\n|.)*?' \ + '\n<\/[^>]+?>\ *$' \ + ')'.freeze + # HTML comment line: # <!-- some commented text --> MARKDOWN_HTML_COMMENT_LINE_REGEX_UNTRUSTED = @@ -499,6 +517,13 @@ module Gitlab }mx.freeze end + def markdown_code_or_html_blocks_untrusted + @markdown_code_or_html_blocks_untrusted ||= + "#{MARKDOWN_CODE_BLOCK_REGEX_UNTRUSTED}" \ + "|" \ + "#{MARKDOWN_HTML_BLOCK_REGEX_UNTRUSTED}" + end + def markdown_code_or_html_comments_untrusted @markdown_code_or_html_comments_untrusted ||= "#{MARKDOWN_CODE_BLOCK_REGEX_UNTRUSTED}" \ @@ -508,6 +533,17 @@ module Gitlab "#{MARKDOWN_HTML_COMMENT_BLOCK_REGEX_UNTRUSTED}" end + def markdown_code_or_html_blocks_or_html_comments_untrusted + @markdown_code_or_html_comments_untrusted ||= + "#{MARKDOWN_CODE_BLOCK_REGEX_UNTRUSTED}" \ + "|" \ + "#{MARKDOWN_HTML_BLOCK_REGEX_UNTRUSTED}" \ + "|" \ + "#{MARKDOWN_HTML_COMMENT_LINE_REGEX_UNTRUSTED}" \ + "|" \ + "#{MARKDOWN_HTML_COMMENT_BLOCK_REGEX_UNTRUSTED}" + end + # Based on Jira's project key format # https://confluence.atlassian.com/adminjiraserver073/changing-the-project-key-format-861253229.html # Avoids linking CVE IDs (https://cve.mitre.org/cve/identifiers/syntaxchange.html#new) as Jira issues. 
diff --git a/lib/gitlab/resource_events/assignment_event_recorder.rb b/lib/gitlab/resource_events/assignment_event_recorder.rb new file mode 100644 index 00000000000..94bd05a17ba --- /dev/null +++ b/lib/gitlab/resource_events/assignment_event_recorder.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module Gitlab + module ResourceEvents + class AssignmentEventRecorder + BATCH_SIZE = 100 + + def initialize(parent:, old_assignees:) + @parent = parent + @old_assignees = old_assignees + end + + def record + return if Feature.disabled?(:record_issue_and_mr_assignee_events, parent.project) + + case parent + when Issue + record_for_parent( + ::ResourceEvents::IssueAssignmentEvent, + :issue_id, + parent, + old_assignees + ) + when MergeRequest + record_for_parent( + ::ResourceEvents::MergeRequestAssignmentEvent, + :merge_request_id, + parent, + old_assignees + ) + end + end + + private + + attr_reader :parent, :old_assignees + + def record_for_parent(resource_klass, foreign_key, parent, old_assignees) + removed_events = (old_assignees - parent.assignees).map do |unassigned_user| + { + foreign_key => parent.id, + user_id: unassigned_user.id, + action: :remove + } + end.to_set + + added_events = (parent.assignees.to_a - old_assignees).map do |added_user| + { + foreign_key => parent.id, + user_id: added_user.id, + action: :add + } + end.to_set + + (removed_events + added_events).each_slice(BATCH_SIZE) do |events| + resource_klass.insert_all(events) + end + end + end + end +end diff --git a/lib/gitlab/service_desk.rb b/lib/gitlab/service_desk.rb index b3d6e890e03..5acbde552c8 100644 --- a/lib/gitlab/service_desk.rb +++ b/lib/gitlab/service_desk.rb @@ -10,7 +10,7 @@ module Gitlab end def self.supported? - Gitlab::IncomingEmail.enabled? && Gitlab::IncomingEmail.supports_wildcard? + Gitlab::Email::IncomingEmail.enabled? && Gitlab::Email::IncomingEmail.supports_wildcard? 
end end end diff --git a/lib/gitlab/service_desk_email.rb b/lib/gitlab/service_desk_email.rb deleted file mode 100644 index bc49efafdda..00000000000 --- a/lib/gitlab/service_desk_email.rb +++ /dev/null @@ -1,26 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module ServiceDeskEmail - class << self - include Gitlab::Email::Common - - def config - Gitlab.config.service_desk_email - end - - def key_from_address(address) - wildcard_address = config&.address - return unless wildcard_address - - Gitlab::IncomingEmail.key_from_address(address, wildcard_address: wildcard_address) - end - - def address_for_key(key) - return if config.address.blank? - - config.address.sub(WILDCARD_PLACEHOLDER, key) - end - end - end -end diff --git a/lib/gitlab/setup_helper.rb b/lib/gitlab/setup_helper.rb index 1e42003b203..2e09a4fce12 100644 --- a/lib/gitlab/setup_helper.rb +++ b/lib/gitlab/setup_helper.rb @@ -80,7 +80,7 @@ module Gitlab # because it uses a Unix socket. # For development and testing purposes, an extra storage is added to gitaly, # which is not known to Rails, but must be explicitly stubbed. 
- def configuration_toml(gitaly_dir, storage_paths, options, gitaly_ruby: true) + def configuration_toml(gitaly_dir, storage_paths, options) storages = [] address = nil @@ -128,7 +128,6 @@ module Gitlab FileUtils.mkdir(runtime_dir) unless File.exist?(runtime_dir) config[:runtime_dir] = runtime_dir - config[:'gitaly-ruby'] = { dir: File.join(gitaly_dir, 'ruby') } if gitaly_ruby config[:'gitlab-shell'] = { dir: Gitlab.config.gitlab_shell.path } config[:bin_dir] = File.expand_path(File.join(gitaly_dir, '_build', 'bin')) # binaries by default are in `_build/bin` config[:gitlab] = { url: Gitlab.config.gitlab.url } diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb index b6e2209b475..bb87104630c 100644 --- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb +++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb @@ -220,7 +220,12 @@ module Gitlab end def cookie_key - "#{idempotency_key}:cookie:v2" + # This duplicates `Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE` both here and in `#idempotency_key` + # This is because `Sidekiq.redis` used to add this prefix automatically through `redis-namespace` + # and we did not notice this in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25447 + # Now we're keeping this as-is to avoid a key-migration when redis-namespace gets + # removed from Sidekiq: https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/944 + "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:#{idempotency_key}:cookie:v2" end def get_cookie @@ -252,7 +257,7 @@ module Gitlab end def with_redis(&block) - Sidekiq.redis(&block) # rubocop:disable Cop/SidekiqRedisCall + Gitlab::Redis::Queues.with(&block) # rubocop:disable Cop/RedisQueueUsage, CodeReuse/ActiveRecord end end end diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb 
index 0fc95534e2a..b065190f656 100644 --- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb +++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/deduplicates_when_scheduling.rb @@ -23,7 +23,7 @@ module Gitlab duplicate_job.set_deduplicated_flag!(expiry) Gitlab::SidekiqLogging::DeduplicationLogger.instance.deduplicated_log( - job, "dropped #{strategy_name}", duplicate_job.options) + job, strategy_name, duplicate_job.options) return false end end diff --git a/lib/gitlab/slash_commands/incident_management/incident_new.rb b/lib/gitlab/slash_commands/incident_management/incident_new.rb index ce91edfd51a..a43235bdeb6 100644 --- a/lib/gitlab/slash_commands/incident_management/incident_new.rb +++ b/lib/gitlab/slash_commands/incident_management/incident_new.rb @@ -5,7 +5,7 @@ module Gitlab module IncidentManagement class IncidentNew < IncidentCommand def self.help_message - 'incident declare' + 'incident declare *(Beta)*' end def self.allowed?(_project, _user) diff --git a/lib/gitlab/slug/environment.rb b/lib/gitlab/slug/environment.rb index fd70def8e7c..2305fcd0061 100644 --- a/lib/gitlab/slug/environment.rb +++ b/lib/gitlab/slug/environment.rb @@ -21,7 +21,7 @@ module Gitlab slugified = name.to_s.downcase.gsub(/[^a-z0-9]/, '-') # Must start with a letter - slugified = 'env-' + slugified unless slugified.match?(/^[a-z]/) + slugified = +"env-#{slugified}" unless slugified.match?(/^[a-z]/) # Repeated dashes are invalid (OpenShift limitation) slugified.squeeze!('-') diff --git a/lib/gitlab/subscription_portal.rb b/lib/gitlab/subscription_portal.rb index 7494f0584d0..6d77acd7f33 100644 --- a/lib/gitlab/subscription_portal.rb +++ b/lib/gitlab/subscription_portal.rb @@ -70,6 +70,10 @@ module Gitlab "#{self.subscriptions_url}/gitlab/namespaces/#{group_id}/renew" end + def self.subscriptions_legacy_sign_in_url + "#{self.subscriptions_url}/customers/sign_in?legacy=true" + end + def self.edit_account_url 
"#{self.subscriptions_url}/customers/edit" end @@ -90,6 +94,7 @@ end Gitlab::SubscriptionPortal.prepend_mod Gitlab::SubscriptionPortal::SUBSCRIPTIONS_URL = Gitlab::SubscriptionPortal.subscriptions_url.freeze +Gitlab::SubscriptionPortal::SUBSCRIPTIONS_LEGACY_SIGN_IN_URL = Gitlab::SubscriptionPortal.subscriptions_legacy_sign_in_url.freeze Gitlab::SubscriptionPortal::PAYMENT_FORM_URL = Gitlab::SubscriptionPortal.payment_form_url.freeze Gitlab::SubscriptionPortal::PAYMENT_VALIDATION_FORM_ID = Gitlab::SubscriptionPortal.payment_validation_form_id.freeze Gitlab::SubscriptionPortal::RENEWAL_SERVICE_EMAIL = Gitlab::SubscriptionPortal.renewal_service_email.freeze diff --git a/lib/gitlab/tracking.rb b/lib/gitlab/tracking.rb index 45f836f10d3..ef86c9d6007 100644 --- a/lib/gitlab/tracking.rb +++ b/lib/gitlab/tracking.rb @@ -8,13 +8,30 @@ module Gitlab end def event(category, action, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists - contexts = [Tracking::StandardContext.new(project: project, user: user, namespace: namespace, **extra).to_context, *context] + action = action.to_s + contexts = [ + Tracking::StandardContext.new( + project: project, + user: user, + namespace: namespace, + **extra).to_context, *context + ] + + track_struct_event(tracker, category, action, label: label, property: property, value: value, contexts: contexts) + end + def database_event(category, action, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists action = action.to_s + destination = Gitlab::Tracking::Destinations::DatabaseEventsSnowplow.new + contexts = [ + Tracking::StandardContext.new( + project: project, + user: user, + namespace: namespace, + **extra).to_context, *context + ] - tracker.event(category, action, label: label, property: property, value: value, context: contexts) - rescue StandardError => error - 
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error, snowplow_category: category, snowplow_action: action) + track_struct_event(destination, category, action, label: label, property: property, value: value, contexts: contexts) end def definition(basename, category: nil, action: nil, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists @@ -48,6 +65,13 @@ module Gitlab private + def track_struct_event(destination, category, action, label:, property:, value:, contexts:) # rubocop:disable Metrics/ParameterLists + destination + .event(category, action, label: label, property: property, value: value, context: contexts) + rescue StandardError => error + Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error, snowplow_category: category, snowplow_action: action) + end + def tracker @tracker ||= if snowplow_micro_enabled? Gitlab::Tracking::Destinations::SnowplowMicro.new diff --git a/lib/gitlab/tracking/destinations/database_events_snowplow.rb b/lib/gitlab/tracking/destinations/database_events_snowplow.rb new file mode 100644 index 00000000000..4f9cd2167f7 --- /dev/null +++ b/lib/gitlab/tracking/destinations/database_events_snowplow.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +module Gitlab + module Tracking + module Destinations + class DatabaseEventsSnowplow < Snowplow + extend ::Gitlab::Utils::Override + + HOSTNAME = 'localhost:9091' + + override :enabled? + # database events are only collected for SaaS instance + def enabled? + ::Gitlab.dev_or_test_env? || ::Gitlab.com? 
+ end + + override :hostname + def hostname + HOSTNAME + end + + private + + override :increment_failed_events_emissions + def increment_failed_events_emissions(value) + Gitlab::Metrics.counter( + :gitlab_db_events_snowplow_failed_events_total, + 'Number of failed Snowplow events emissions' + ).increment({}, value.to_i) + end + + override :increment_successful_events_emissions + def increment_successful_events_emissions(value) + Gitlab::Metrics.counter( + :gitlab_db_events_snowplow_successful_events_total, + 'Number of successful Snowplow events emissions' + ).increment({}, value.to_i) + end + + override :increment_total_events_counter + def increment_total_events_counter + Gitlab::Metrics.counter( + :gitlab_db_events_snowplow_events_total, + 'Number of Snowplow events' + ).increment + end + end + end + end +end diff --git a/lib/gitlab/untrusted_regexp.rb b/lib/gitlab/untrusted_regexp.rb index 7c7bda3a8f9..b7817a0c141 100644 --- a/lib/gitlab/untrusted_regexp.rb +++ b/lib/gitlab/untrusted_regexp.rb @@ -29,6 +29,27 @@ module Gitlab RE2.GlobalReplace(text, regexp, rewrite) end + # There is no built-in replace with block support (like `gsub`). We can accomplish + # the same thing by parsing and rebuilding the string with the substitutions. + def replace_gsub(text) + new_text = +'' + remainder = text + + matched = match(remainder) + + until matched.nil? || matched.to_a.compact.empty? 
+ partitioned = remainder.partition(matched.to_s) + new_text << partitioned.first + remainder = partitioned.last + + new_text << yield(matched) + + matched = match(remainder) + end + + new_text << remainder + end + def scan(text) matches = scan_regexp.scan(text).to_a matches.map!(&:first) if regexp.number_of_capturing_groups == 0 diff --git a/lib/gitlab/url_blocker.rb b/lib/gitlab/url_blocker.rb index 1be9190e5f8..2c02874876a 100644 --- a/lib/gitlab/url_blocker.rb +++ b/lib/gitlab/url_blocker.rb @@ -12,7 +12,7 @@ module Gitlab class << self # Validates the given url according to the constraints specified by arguments. # - # ports - Raises error if the given URL port does is not between given ports. + # ports - Raises error if the given URL port is not between given ports. # allow_localhost - Raises error if URL resolves to a localhost IP address and argument is false. # allow_local_network - Raises error if URL resolves to a link-local address and argument is false. # allow_object_storage - Avoid raising an error if URL resolves to an object storage endpoint and argument is true. @@ -62,6 +62,10 @@ module Gitlab end ip_address = ip_address(address_info) + + # Ignore DNS rebind protection when a proxy is being used, as DNS + # rebinding is expected behavior. + dns_rebind_protection &= !uri_under_proxy_setting?(uri, ip_address) return [uri, nil] if domain_in_allow_list?(uri) protected_uri_with_hostname = enforce_uri_hostname(ip_address, uri, dns_rebind_protection) @@ -126,6 +130,18 @@ module Gitlab validate_unicode_restriction(uri) if ascii_only end + def uri_under_proxy_setting?(uri, ip_address) + return false unless Gitlab.http_proxy_env? + # `no_proxy|NO_PROXY` specifies addresses for which the proxy is not + # used. If it's empty, there are no exceptions and this URI + # will be under proxy settings. + return true if no_proxy_env.blank? + + # `no_proxy|NO_PROXY` is being used. We must check whether it + # applies to this specific URI. 
+ ::URI::Generic.use_proxy?(uri.hostname, ip_address, get_port(uri), no_proxy_env) + end + # Returns addrinfo object for the URI. # # @param uri [Addressable::URI] @@ -151,9 +167,12 @@ module Gitlab # Enforce if the instance should block requests return true if deny_all_requests_except_allowed?(deny_all_requests_except_allowed) - # Do not enforce unless DNS rebinding protection is enabled + # Do not enforce if DNS rebinding protection is disabled return false unless dns_rebind_protection + # Do not enforce if proxy is used + return false if Gitlab.http_proxy_env? + # In the test suite we use a lot of mocked urls that are either invalid or # don't exist. In order to avoid modifying a ton of tests and factories # we allow invalid urls unless the environment variable RSPEC_ALLOW_INVALID_URLS @@ -364,6 +383,10 @@ module Gitlab def config Gitlab.config end + + def no_proxy_env + ENV['no_proxy'] || ENV['NO_PROXY'] + end end end end diff --git a/lib/gitlab/url_blockers/ip_allowlist_entry.rb b/lib/gitlab/url_blockers/ip_allowlist_entry.rb index b293afe166c..ff4eb86ec41 100644 --- a/lib/gitlab/url_blockers/ip_allowlist_entry.rb +++ b/lib/gitlab/url_blockers/ip_allowlist_entry.rb @@ -12,11 +12,32 @@ module Gitlab end def match?(requested_ip, requested_port = nil) - return false unless ip.include?(requested_ip) + requested_ip = IPAddr.new(requested_ip) if requested_ip.is_a?(String) + + return false unless ip_include?(requested_ip) return true if port.nil? port == requested_port end + + private + + # Prior to ipaddr v1.2.3, if the allow list were the IPv4 to IPv6 + # mapped address ::ffff:169.254.168.100 and the requested IP were + # 169.254.168.100 or ::ffff:169.254.168.100, the IP would be + # considered in the allow list. However, with + # https://github.com/ruby/ipaddr/pull/31, IPAddr#include? will + # only match if the IP versions are the same. 
This method + # preserves backwards compatibility if the versions differ by + # checking inclusion by coercing an IPv4 address to its IPv6 + # mapped address. + def ip_include?(requested_ip) + return true if ip.include?(requested_ip) + return ip.include?(requested_ip.ipv4_mapped) if requested_ip.ipv4? && ip.ipv6? + return ip.ipv4_mapped.include?(requested_ip) if requested_ip.ipv6? && ip.ipv4? + + false + end end end end diff --git a/lib/gitlab/usage/metrics/instrumentations/database_mode.rb b/lib/gitlab/usage/metrics/instrumentations/database_mode.rb new file mode 100644 index 00000000000..1b97ef4a1d2 --- /dev/null +++ b/lib/gitlab/usage/metrics/instrumentations/database_mode.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module Gitlab + module Usage + module Metrics + module Instrumentations + class DatabaseMode < GenericMetric + value do + Gitlab::Database.database_mode + end + end + end + end + end +end diff --git a/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric.rb b/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric.rb index ab9c6f87023..be3b3b3d682 100644 --- a/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric.rb +++ b/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric.rb @@ -6,7 +6,7 @@ module Gitlab module Instrumentations class IncomingEmailEncryptedSecretsEnabledMetric < GenericMetric value do - Gitlab::IncomingEmail.encrypted_secrets.active? + Gitlab::Email::IncomingEmail.encrypted_secrets.active? 
end end end diff --git a/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric.rb b/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric.rb index 4332043de8a..5e38339801b 100644 --- a/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric.rb +++ b/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric.rb @@ -6,7 +6,7 @@ module Gitlab module Instrumentations class ServiceDeskEmailEncryptedSecretsEnabledMetric < GenericMetric value do - Gitlab::ServiceDeskEmail.encrypted_secrets.active? + Gitlab::Email::ServiceDeskEmail.encrypted_secrets.active? end end end diff --git a/lib/gitlab/usage/service_ping_report.rb b/lib/gitlab/usage/service_ping_report.rb index 1eda72ba570..3bc941260d6 100644 --- a/lib/gitlab/usage/service_ping_report.rb +++ b/lib/gitlab/usage/service_ping_report.rb @@ -9,7 +9,9 @@ module Gitlab def for(output:, cached: false) case output.to_sym when :all_metrics_values - with_instrumentation_classes(all_metrics_values(cached), :with_value) + Rails.cache.fetch(CACHE_KEY, force: !cached, expires_in: 2.weeks) do + with_instrumentation_classes(Gitlab::UsageData.data, :with_value) + end when :metrics_queries with_instrumentation_classes(metrics_queries, :with_instrumentation) when :non_sql_metrics_values @@ -27,12 +29,6 @@ module Gitlab old_payload.with_indifferent_access.deep_merge(instrumented_payload) end - def all_metrics_values(cached) - Rails.cache.fetch(CACHE_KEY, force: !cached, expires_in: 2.weeks) do - Gitlab::UsageData.data - end - end - def metrics_queries Gitlab::UsageDataQueries.data end diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb index 52b8d70c113..01252a705f0 100644 --- a/lib/gitlab/usage_data.rb +++ b/lib/gitlab/usage_data.rb @@ -240,7 +240,7 @@ module Gitlab omniauth_enabled: alt_usage_data(fallback: nil) { Gitlab::Auth.omniauth_enabled? 
}, prometheus_enabled: alt_usage_data(fallback: nil) { Gitlab::Prometheus::Internal.prometheus_enabled? }, prometheus_metrics_enabled: alt_usage_data(fallback: nil) { Gitlab::Metrics.prometheus_metrics_enabled? }, - reply_by_email_enabled: alt_usage_data(fallback: nil) { Gitlab::IncomingEmail.enabled? }, + reply_by_email_enabled: alt_usage_data(fallback: nil) { Gitlab::Email::IncomingEmail.enabled? }, web_ide_clientside_preview_enabled: alt_usage_data(fallback: nil) { false }, signup_enabled: alt_usage_data(fallback: nil) { Gitlab::CurrentSettings.allow_signup? }, grafana_link_enabled: alt_usage_data(fallback: nil) { Gitlab::CurrentSettings.grafana_enabled? }, @@ -370,16 +370,6 @@ module Gitlab } end - def merge_requests_users(time_period) - redis_usage_data do - Gitlab::UsageDataCounters::HLLRedisCounter.unique_events( - event_names: :merge_request_action, - start_date: time_period[:created_at].first, - end_date: time_period[:created_at].last - ) - end - end - def installation_type if Rails.env.production? Gitlab::INSTALLATION_TYPE @@ -447,9 +437,7 @@ module Gitlab projects_without_disable_overriding_approvers_per_merge_request: count(::Project.where(time_period.merge(disable_overriding_approvers_per_merge_request: [false, nil]))), remote_mirrors: distinct_count(::Project.with_remote_mirrors.where(time_period), :creator_id), snippets: distinct_count(::Snippet.where(time_period), :author_id) - }.tap do |h| - h[:merge_requests_users] = merge_requests_users(time_period) if time_period.present? 
- end + } end # rubocop: enable CodeReuse/ActiveRecord diff --git a/lib/gitlab/usage_data_counters/ci_template_unique_counter.rb b/lib/gitlab/usage_data_counters/ci_template_unique_counter.rb index 7f6d67e01c7..97091ff975b 100644 --- a/lib/gitlab/usage_data_counters/ci_template_unique_counter.rb +++ b/lib/gitlab/usage_data_counters/ci_template_unique_counter.rb @@ -2,7 +2,7 @@ module Gitlab::UsageDataCounters class CiTemplateUniqueCounter - REDIS_SLOT = 'ci_templates' + PREFIX = 'ci_templates' KNOWN_EVENTS_FILE_PATH = File.expand_path('known_events/ci_templates.yml', __dir__) class << self @@ -28,7 +28,7 @@ module Gitlab::UsageDataCounters def ci_template_event_name(template_name, config_source) prefix = 'implicit_' if config_source.to_s == 'auto_devops_source' - "p_#{REDIS_SLOT}_#{prefix}#{template_to_event_name(template_name)}" + "p_#{PREFIX}_#{prefix}#{template_to_event_name(template_name)}" end def expand_template_name(template_name) diff --git a/lib/gitlab/usage_data_counters/counter_events/package_events.yml b/lib/gitlab/usage_data_counters/counter_events/package_events.yml index f7ddc53f50d..129bf77c7f0 100644 --- a/lib/gitlab/usage_data_counters/counter_events/package_events.yml +++ b/lib/gitlab/usage_data_counters/counter_events/package_events.yml @@ -7,6 +7,7 @@ - i_package_conan_push_package - i_package_debian_delete_package - i_package_debian_pull_package +- i_package_debian_push_package - i_package_delete_package - i_package_delete_package_by_deploy_token - i_package_delete_package_by_guest diff --git a/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb b/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb index c0d1af8a43a..31f090e0f51 100644 --- a/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb +++ b/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb @@ -37,8 +37,8 @@ module Gitlab ISSUE_DESIGN_COMMENT_REMOVED = 'g_project_management_issue_design_comments_removed' class << self - def 
track_issue_created_action(author:, project:) - track_snowplow_action(ISSUE_CREATED, author, project) + def track_issue_created_action(author:, namespace:) + track_snowplow_action(ISSUE_CREATED, author, namespace) track_unique_action(ISSUE_CREATED, author) end @@ -179,7 +179,16 @@ module Gitlab private - def track_snowplow_action(event_name, author, project) + def track_snowplow_action(event_name, author, container) + namespace, project = case container + when Project + [container.namespace, container] + when Namespaces::ProjectNamespace + [container.parent, container.project] + else + [container, nil] + end + return unless author Gitlab::Tracking.event( @@ -188,7 +197,7 @@ module Gitlab label: ISSUE_LABEL, property: event_name, project: project, - namespace: project.namespace, + namespace: namespace, user: author, context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_context] ) diff --git a/lib/gitlab/usage_data_counters/known_events/analytics.yml b/lib/gitlab/usage_data_counters/known_events/analytics.yml index 1c390f2d7fd..0b30308b552 100644 --- a/lib/gitlab/usage_data_counters/known_events/analytics.yml +++ b/lib/gitlab/usage_data_counters/known_events/analytics.yml @@ -1,39 +1,26 @@ - name: users_viewing_analytics_group_devops_adoption - redis_slot: analytics aggregation: weekly - name: i_analytics_dev_ops_adoption - redis_slot: analytics aggregation: weekly - name: i_analytics_dev_ops_score - redis_slot: analytics aggregation: weekly - name: i_analytics_instance_statistics - redis_slot: analytics aggregation: weekly - name: p_analytics_pipelines - redis_slot: analytics aggregation: weekly - name: p_analytics_valuestream - redis_slot: analytics aggregation: weekly - name: p_analytics_repo - redis_slot: analytics aggregation: weekly - name: i_analytics_cohorts - redis_slot: analytics aggregation: weekly - name: p_analytics_ci_cd_pipelines - redis_slot: analytics aggregation: weekly - name: 
p_analytics_ci_cd_deployment_frequency - redis_slot: analytics aggregation: weekly - name: p_analytics_ci_cd_lead_time - redis_slot: analytics aggregation: weekly - name: p_analytics_ci_cd_time_to_restore_service - redis_slot: analytics aggregation: weekly - name: p_analytics_ci_cd_change_failure_rate - redis_slot: analytics aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/ci_templates.yml b/lib/gitlab/usage_data_counters/known_events/ci_templates.yml index e717679e3dc..82c023e6e38 100644 --- a/lib/gitlab/usage_data_counters/known_events/ci_templates.yml +++ b/lib/gitlab/usage_data_counters/known_events/ci_templates.yml @@ -4,455 +4,304 @@ # Do not edit it manually! --- - name: p_ci_templates_terraform_base_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_terraform_base - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_dotnet - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_nodejs - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_openshift - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_auto_devops - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_bash - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_rust - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_elixir - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_clojure - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_crystal - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_getting_started - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_code_quality - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_verify_load_performance_testing - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_verify_accessibility - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_verify_failfast - redis_slot: 
ci_templates aggregation: weekly - name: p_ci_templates_verify_browser_performance - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_verify_browser_performance_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_grails - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_sast - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dast_runner_validation - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dast_on_demand_scan - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_secret_detection - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_license_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_coverage_fuzzing_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dast_on_demand_api_scan - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_coverage_fuzzing - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_api_fuzzing_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_secure_binaries - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dast_api - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_container_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dast_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_sast_iac - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dependency_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dast_api_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_container_scanning_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_api_fuzzing - 
redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_dast - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_api_discovery - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_fortify_fod_sast - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_security_sast_iac_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_qualys_iac_security - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_ios_fastlane - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_composer - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_c - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_python - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_android_fastlane - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_android_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_django - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_maven - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_liquibase - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_flutter - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_workflows_branch_pipelines - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_workflows_mergerequest_pipelines - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_laravel - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_kaniko - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_php - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_packer - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_themekit - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_terraform - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_katalon - 
redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_mono - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_go - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_scala - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_latex - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_android - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_indeni_cloudrail - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_matlab - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_deploy_ecs - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_aws_cf_provision_and_deploy_ec2 - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_aws_deploy_ecs - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_gradle - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_chef - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_dast_default_branch_deploy - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_load_performance_testing - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_helm_2to3 - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_sast - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_secret_detection - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_license_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_code_intelligence - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_code_quality - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_deploy_ecs - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_deploy_ec2 - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_license_scanning_latest - redis_slot: ci_templates 
aggregation: weekly - name: p_ci_templates_jobs_deploy - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_build - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_browser_performance_testing - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_container_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_container_scanning_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_dependency_scanning_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_test - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_sast_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_sast_iac - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_secret_detection_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_dependency_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_deploy_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_browser_performance_testing_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_cf_provision - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_build_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_jobs_sast_iac_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_terraform_latest - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_swift - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_jekyll - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_harp - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_octopress - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_brunch - redis_slot: ci_templates aggregation: weekly - name: 
p_ci_templates_pages_doxygen - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_hyde - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_lektor - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_jbake - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_hexo - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_middleman - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_hugo - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_pelican - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_nanoc - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_swaggerui - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_jigsaw - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_metalsmith - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_gatsby - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_pages_html - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_dart - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_docker - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_julia - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_npm - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_dotnet_core - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_5_minute_production_app - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_ruby - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_auto_devops - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_browser_performance_testing - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_build - redis_slot: ci_templates aggregation: 
weekly - name: p_ci_templates_implicit_jobs_code_intelligence - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_code_quality - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_container_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_dast_default_branch_deploy - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_dependency_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_deploy - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_deploy_ec2 - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_deploy_ecs - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_helm_2to3 - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_license_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_sast - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_secret_detection - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_jobs_test - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_security_container_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_security_dast - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_security_dependency_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_security_license_scanning - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_security_sast - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_implicit_security_secret_detection - redis_slot: ci_templates aggregation: weekly - name: p_ci_templates_terraform_module_base - redis_slot: ci_templates aggregation: weekly - name: 
p_ci_templates_terraform_module - redis_slot: ci_templates aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/ci_users.yml b/lib/gitlab/usage_data_counters/known_events/ci_users.yml index 6db10366b83..49757c6e672 100644 --- a/lib/gitlab/usage_data_counters/known_events/ci_users.yml +++ b/lib/gitlab/usage_data_counters/known_events/ci_users.yml @@ -1,6 +1,4 @@ - name: ci_users_executing_deployment_job - redis_slot: ci_users aggregation: weekly - name: ci_users_executing_verify_environment_job - redis_slot: ci_users aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/code_review_events.yml b/lib/gitlab/usage_data_counters/known_events/code_review_events.yml index f64da801c39..db0c0653f63 100644 --- a/lib/gitlab/usage_data_counters/known_events/code_review_events.yml +++ b/lib/gitlab/usage_data_counters/known_events/code_review_events.yml @@ -1,345 +1,233 @@ --- - name: i_code_review_create_note_in_ipynb_diff - redis_slot: code_review aggregation: weekly - name: i_code_review_create_note_in_ipynb_diff_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_create_note_in_ipynb_diff_commit - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_note_in_ipynb_diff - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_note_in_ipynb_diff_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_note_in_ipynb_diff_commit - redis_slot: code_review aggregation: weekly - name: i_code_review_mr_diffs - redis_slot: code_review aggregation: weekly - name: i_code_review_user_single_file_diffs - redis_slot: code_review aggregation: weekly - name: i_code_review_mr_single_file_diffs - redis_slot: code_review aggregation: weekly - name: i_code_review_user_toggled_task_item_status - redis_slot: code_review aggregation: weekly - name: i_code_review_create_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_mr - 
redis_slot: code_review aggregation: weekly - name: i_code_review_user_close_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_user_reopen_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_user_approve_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_user_unapprove_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_user_resolve_thread - redis_slot: code_review aggregation: weekly - name: i_code_review_user_unresolve_thread - redis_slot: code_review aggregation: weekly - name: i_code_review_edit_mr_title - redis_slot: code_review aggregation: weekly - name: i_code_review_edit_mr_desc - redis_slot: code_review aggregation: weekly - name: i_code_review_user_merge_mr - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_mr_comment - redis_slot: code_review aggregation: weekly - name: i_code_review_user_edit_mr_comment - redis_slot: code_review aggregation: weekly - name: i_code_review_user_remove_mr_comment - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_review_note - redis_slot: code_review aggregation: weekly - name: i_code_review_user_publish_review - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_multiline_mr_comment - redis_slot: code_review aggregation: weekly - name: i_code_review_user_edit_multiline_mr_comment - redis_slot: code_review aggregation: weekly - name: i_code_review_user_remove_multiline_mr_comment - redis_slot: code_review aggregation: weekly - name: i_code_review_user_add_suggestion - redis_slot: code_review aggregation: weekly - name: i_code_review_user_apply_suggestion - redis_slot: code_review aggregation: weekly - name: i_code_review_user_assigned - redis_slot: code_review aggregation: weekly - name: i_code_review_user_marked_as_draft - redis_slot: code_review aggregation: weekly - name: i_code_review_user_unmarked_as_draft - redis_slot: code_review aggregation: weekly - 
name: i_code_review_user_review_requested - redis_slot: code_review aggregation: weekly - name: i_code_review_user_approval_rule_added - redis_slot: code_review aggregation: weekly - name: i_code_review_user_approval_rule_deleted - redis_slot: code_review aggregation: weekly - name: i_code_review_user_approval_rule_edited - redis_slot: code_review aggregation: weekly - name: i_code_review_user_vs_code_api_request - redis_slot: code_review aggregation: weekly - name: i_code_review_user_jetbrains_api_request - redis_slot: code_review aggregation: weekly - name: i_code_review_user_gitlab_cli_api_request - redis_slot: code_review aggregation: weekly - name: i_code_review_user_create_mr_from_issue - redis_slot: code_review aggregation: weekly - name: i_code_review_user_mr_discussion_locked - redis_slot: code_review aggregation: weekly - name: i_code_review_user_mr_discussion_unlocked - redis_slot: code_review aggregation: weekly - name: i_code_review_user_time_estimate_changed - redis_slot: code_review aggregation: weekly - name: i_code_review_user_time_spent_changed - redis_slot: code_review aggregation: weekly - name: i_code_review_user_assignees_changed - redis_slot: code_review aggregation: weekly - name: i_code_review_user_reviewers_changed - redis_slot: code_review aggregation: weekly - name: i_code_review_user_milestone_changed - redis_slot: code_review aggregation: weekly - name: i_code_review_user_labels_changed - redis_slot: code_review aggregation: weekly # Diff settings events - name: i_code_review_click_diff_view_setting - redis_slot: code_review aggregation: weekly - name: i_code_review_click_single_file_mode_setting - redis_slot: code_review aggregation: weekly - name: i_code_review_click_file_browser_setting - redis_slot: code_review aggregation: weekly - name: i_code_review_click_whitespace_setting - redis_slot: code_review aggregation: weekly - name: i_code_review_diff_view_inline - redis_slot: code_review aggregation: weekly - name: 
i_code_review_diff_view_parallel - redis_slot: code_review aggregation: weekly - name: i_code_review_file_browser_tree_view - redis_slot: code_review aggregation: weekly - name: i_code_review_file_browser_list_view - redis_slot: code_review aggregation: weekly - name: i_code_review_diff_show_whitespace - redis_slot: code_review aggregation: weekly - name: i_code_review_diff_hide_whitespace - redis_slot: code_review aggregation: weekly - name: i_code_review_diff_single_file - redis_slot: code_review aggregation: weekly - name: i_code_review_diff_multiple_files - redis_slot: code_review aggregation: weekly - name: i_code_review_user_load_conflict_ui - redis_slot: code_review aggregation: weekly - name: i_code_review_user_resolve_conflict - redis_slot: code_review aggregation: weekly - name: i_code_review_user_searches_diff - redis_slot: code_review aggregation: weekly - name: i_code_review_total_suggestions_applied - redis_slot: code_review aggregation: weekly - name: i_code_review_total_suggestions_added - redis_slot: code_review aggregation: weekly - name: i_code_review_user_resolve_thread_in_issue - redis_slot: code_review aggregation: weekly - name: i_code_review_widget_nothing_merge_click_new_file - redis_slot: code_review aggregation: weekly - name: i_code_review_post_merge_delete_branch - redis_slot: code_review aggregation: weekly - name: i_code_review_post_merge_click_revert - redis_slot: code_review aggregation: weekly - name: i_code_review_post_merge_click_cherry_pick - redis_slot: code_review aggregation: weekly - name: i_code_review_post_merge_submit_revert_modal - redis_slot: code_review aggregation: weekly - name: i_code_review_post_merge_submit_cherry_pick_modal - redis_slot: code_review aggregation: weekly # MR Widget Extensions ## Test Summary - name: i_code_review_merge_request_widget_test_summary_view - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_test_summary_full_report_clicked - redis_slot: code_review 
aggregation: weekly - name: i_code_review_merge_request_widget_test_summary_expand - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_test_summary_expand_success - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_test_summary_expand_warning - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_test_summary_expand_failed - redis_slot: code_review aggregation: weekly ## Accessibility - name: i_code_review_merge_request_widget_accessibility_view - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_accessibility_full_report_clicked - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_accessibility_expand - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_accessibility_expand_success - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_accessibility_expand_warning - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_accessibility_expand_failed - redis_slot: code_review aggregation: weekly ## Code Quality - name: i_code_review_merge_request_widget_code_quality_view - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_code_quality_full_report_clicked - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_code_quality_expand - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_code_quality_expand_success - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_code_quality_expand_warning - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_code_quality_expand_failed - redis_slot: code_review aggregation: weekly ## Terraform - name: i_code_review_merge_request_widget_terraform_view - redis_slot: code_review 
aggregation: weekly - name: i_code_review_merge_request_widget_terraform_full_report_clicked - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_terraform_expand - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_terraform_expand_success - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_terraform_expand_warning - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_terraform_expand_failed - redis_slot: code_review aggregation: weekly - name: i_code_review_submit_review_approve - redis_slot: code_review aggregation: weekly - name: i_code_review_submit_review_comment - redis_slot: code_review aggregation: weekly ## License Compliance - name: i_code_review_merge_request_widget_license_compliance_view - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_license_compliance_full_report_clicked - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_license_compliance_expand - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_license_compliance_expand_success - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_license_compliance_expand_warning - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_license_compliance_expand_failed - redis_slot: code_review aggregation: weekly ## Security Reports - name: i_code_review_merge_request_widget_security_reports_view - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_security_reports_full_report_clicked - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_security_reports_expand - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_security_reports_expand_success - redis_slot: code_review aggregation: 
weekly - name: i_code_review_merge_request_widget_security_reports_expand_warning - redis_slot: code_review aggregation: weekly - name: i_code_review_merge_request_widget_security_reports_expand_failed - redis_slot: code_review aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/common.yml b/lib/gitlab/usage_data_counters/known_events/common.yml index 630638c93bf..d3520961665 100644 --- a/lib/gitlab/usage_data_counters/known_events/common.yml +++ b/lib/gitlab/usage_data_counters/known_events/common.yml @@ -1,19 +1,14 @@ --- # Compliance category - name: g_edit_by_web_ide - redis_slot: edit aggregation: daily - name: g_edit_by_sfe - redis_slot: edit aggregation: daily - name: g_edit_by_snippet_ide - redis_slot: edit aggregation: daily - name: g_edit_by_live_preview - redis_slot: edit aggregation: daily - name: i_search_total - redis_slot: search aggregation: weekly - name: wiki_action aggregation: daily @@ -26,211 +21,145 @@ - name: merge_request_action aggregation: daily - name: i_source_code_code_intelligence - redis_slot: source_code aggregation: daily # Incident management - name: incident_management_alert_status_changed - redis_slot: incident_management aggregation: weekly - name: incident_management_alert_assigned - redis_slot: incident_management aggregation: weekly - name: incident_management_alert_todo - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_created - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_reopened - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_closed - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_assigned - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_todo - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_comment - redis_slot: incident_management aggregation: 
weekly - name: incident_management_incident_zoom_meeting - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_relate - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_unrelate - redis_slot: incident_management aggregation: weekly - name: incident_management_incident_change_confidential - redis_slot: incident_management aggregation: weekly # Incident management timeline events - name: incident_management_timeline_event_created - redis_slot: incident_management aggregation: weekly - name: incident_management_timeline_event_edited - redis_slot: incident_management aggregation: weekly - name: incident_management_timeline_event_deleted - redis_slot: incident_management aggregation: weekly # Incident management alerts - name: incident_management_alert_create_incident - redis_slot: incident_management aggregation: weekly # Testing category - name: i_testing_test_case_parsed - redis_slot: testing - aggregation: weekly -- name: i_testing_summary_widget_total aggregation: weekly - name: i_testing_test_report_uploaded - redis_slot: testing aggregation: weekly - name: i_testing_coverage_report_uploaded - redis_slot: testing aggregation: weekly # Project Management group - name: g_project_management_issue_title_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_description_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_assignee_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_made_confidential - redis_slot: project_management aggregation: daily - name: g_project_management_issue_made_visible - redis_slot: project_management aggregation: daily - name: g_project_management_issue_created - redis_slot: project_management aggregation: daily - name: g_project_management_issue_closed - redis_slot: project_management aggregation: daily - name: 
g_project_management_issue_reopened - redis_slot: project_management aggregation: daily - name: g_project_management_issue_label_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_milestone_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_cross_referenced - redis_slot: project_management aggregation: daily - name: g_project_management_issue_moved - redis_slot: project_management aggregation: daily - name: g_project_management_issue_related - redis_slot: project_management aggregation: daily - name: g_project_management_issue_unrelated - redis_slot: project_management aggregation: daily - name: g_project_management_issue_marked_as_duplicate - redis_slot: project_management aggregation: daily - name: g_project_management_issue_locked - redis_slot: project_management aggregation: daily - name: g_project_management_issue_unlocked - redis_slot: project_management aggregation: daily - name: g_project_management_issue_designs_added - redis_slot: project_management aggregation: daily - name: g_project_management_issue_designs_modified - redis_slot: project_management aggregation: daily - name: g_project_management_issue_designs_removed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_due_date_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_design_comments_removed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_time_estimate_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_time_spent_changed - redis_slot: project_management aggregation: daily - name: g_project_management_issue_comment_added - redis_slot: project_management aggregation: daily - name: g_project_management_issue_comment_edited - redis_slot: project_management aggregation: daily - name: g_project_management_issue_comment_removed - redis_slot: 
project_management aggregation: daily - name: g_project_management_issue_cloned - redis_slot: project_management aggregation: daily # Runner group - name: g_runner_fleet_read_jobs_statistics - redis_slot: runner aggregation: weekly # Secrets Management - name: i_snippets_show - redis_slot: snippets aggregation: weekly # Terraform - name: p_terraform_state_api_unique_users - redis_slot: terraform aggregation: weekly # Pipeline Authoring group - name: o_pipeline_authoring_unique_users_committing_ciconfigfile - redis_slot: pipeline_authoring aggregation: weekly - name: o_pipeline_authoring_unique_users_pushing_mr_ciconfigfile - redis_slot: pipeline_authoring aggregation: weekly - name: i_ci_secrets_management_id_tokens_build_created - redis_slot: ci_secrets_management aggregation: weekly # Merge request widgets - name: users_expanding_secure_security_report - redis_slot: secure aggregation: weekly - name: users_expanding_testing_code_quality_report - redis_slot: testing aggregation: weekly - name: users_expanding_testing_accessibility_report - redis_slot: testing aggregation: weekly - name: users_expanding_testing_license_compliance_report - redis_slot: testing aggregation: weekly - name: users_visiting_testing_license_compliance_full_report - redis_slot: testing aggregation: weekly - name: users_visiting_testing_manage_license_compliance - redis_slot: testing aggregation: weekly - name: users_clicking_license_testing_visiting_external_website - redis_slot: testing aggregation: weekly # Geo group - name: g_geo_proxied_requests - redis_slot: geo aggregation: daily # Manage - name: unique_active_user aggregation: weekly # Environments page - name: users_visiting_environments_pages - redis_slot: users aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/container_registry_events.yml b/lib/gitlab/usage_data_counters/known_events/container_registry_events.yml index ac40079a6dc..aa0f9965fa7 100644 --- 
a/lib/gitlab/usage_data_counters/known_events/container_registry_events.yml +++ b/lib/gitlab/usage_data_counters/known_events/container_registry_events.yml @@ -1,16 +1,11 @@ --- - name: i_container_registry_push_tag_user aggregation: weekly - redis_slot: container_registry - name: i_container_registry_delete_tag_user aggregation: weekly - redis_slot: container_registry - name: i_container_registry_push_repository_user aggregation: weekly - redis_slot: container_registry - name: i_container_registry_delete_repository_user aggregation: weekly - redis_slot: container_registry - name: i_container_registry_create_repository_user aggregation: weekly - redis_slot: container_registry diff --git a/lib/gitlab/usage_data_counters/known_events/ecosystem.yml b/lib/gitlab/usage_data_counters/known_events/ecosystem.yml index 03bbba663c5..6e4a893d19a 100644 --- a/lib/gitlab/usage_data_counters/known_events/ecosystem.yml +++ b/lib/gitlab/usage_data_counters/known_events/ecosystem.yml @@ -1,35 +1,24 @@ --- # Ecosystem category - name: i_ecosystem_jira_service_close_issue - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_jira_service_cross_reference - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_issue_notification - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_push_notification - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_deployment_notification - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_wiki_page_notification - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_merge_request_notification - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_note_notification - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_tag_push_notification - redis_slot: ecosystem aggregation: weekly - name: i_ecosystem_slack_service_confidential_note_notification - redis_slot: ecosystem 
aggregation: weekly - name: i_ecosystem_slack_service_confidential_issue_notification - redis_slot: ecosystem aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/error_tracking.yml b/lib/gitlab/usage_data_counters/known_events/error_tracking.yml index efed16c11f8..ebfd1b274f9 100644 --- a/lib/gitlab/usage_data_counters/known_events/error_tracking.yml +++ b/lib/gitlab/usage_data_counters/known_events/error_tracking.yml @@ -1,7 +1,5 @@ --- - name: error_tracking_view_details - redis_slot: error_tracking aggregation: weekly - name: error_tracking_view_list - redis_slot: error_tracking aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/importer_events.yml b/lib/gitlab/usage_data_counters/known_events/importer_events.yml index a6c90a6c762..abbd83a012b 100644 --- a/lib/gitlab/usage_data_counters/known_events/importer_events.yml +++ b/lib/gitlab/usage_data_counters/known_events/importer_events.yml @@ -1,18 +1,13 @@ --- # Importer events - name: github_import_project_start - redis_slot: import aggregation: weekly - name: github_import_project_success - redis_slot: import aggregation: weekly - name: github_import_project_failure - redis_slot: import aggregation: weekly - name: github_import_project_cancelled - redis_slot: import aggregation: weekly - name: github_import_project_partially_completed - redis_slot: import aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/integrations.yml b/lib/gitlab/usage_data_counters/known_events/integrations.yml new file mode 100644 index 00000000000..4a83581e9f0 --- /dev/null +++ b/lib/gitlab/usage_data_counters/known_events/integrations.yml @@ -0,0 +1,18 @@ +- name: i_integrations_gitlab_for_slack_app_issue_notification + aggregation: weekly +- name: i_integrations_gitlab_for_slack_app_push_notification + aggregation: weekly +- name: i_integrations_gitlab_for_slack_app_deployment_notification + aggregation: weekly +- name: 
i_integrations_gitlab_for_slack_app_wiki_page_notification + aggregation: weekly +- name: i_integrations_gitlab_for_slack_app_merge_request_notification + aggregation: weekly +- name: i_integrations_gitlab_for_slack_app_note_notification + aggregation: weekly +- name: i_integrations_gitlab_for_slack_app_tag_push_notification + aggregation: weekly +- name: i_integrations_gitlab_for_slack_app_confidential_note_notification + aggregation: weekly +- name: i_integrations_gitlab_for_slack_app_confidential_issue_notification + aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/kubernetes_agent.yml b/lib/gitlab/usage_data_counters/known_events/kubernetes_agent.yml index 9703c022ef5..b3d1c51c0e7 100644 --- a/lib/gitlab/usage_data_counters/known_events/kubernetes_agent.yml +++ b/lib/gitlab/usage_data_counters/known_events/kubernetes_agent.yml @@ -1,3 +1,2 @@ - name: agent_users_using_ci_tunnel - redis_slot: agent aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/package_events.yml b/lib/gitlab/usage_data_counters/known_events/package_events.yml index d9797635240..fa99798cde0 100644 --- a/lib/gitlab/usage_data_counters/known_events/package_events.yml +++ b/lib/gitlab/usage_data_counters/known_events/package_events.yml @@ -1,67 +1,49 @@ --- - name: i_package_composer_deploy_token aggregation: weekly - redis_slot: package - name: i_package_composer_user aggregation: weekly - redis_slot: package - name: i_package_conan_deploy_token aggregation: weekly - redis_slot: package - name: i_package_conan_user aggregation: weekly - redis_slot: package +- name: i_package_debian_deploy_token + aggregation: weekly +- name: i_package_debian_user + aggregation: weekly - name: i_package_generic_deploy_token aggregation: weekly - redis_slot: package - name: i_package_generic_user aggregation: weekly - redis_slot: package - name: i_package_helm_deploy_token aggregation: weekly - redis_slot: package - name: i_package_helm_user aggregation: 
weekly - redis_slot: package - name: i_package_maven_deploy_token aggregation: weekly - redis_slot: package - name: i_package_maven_user aggregation: weekly - redis_slot: package - name: i_package_npm_deploy_token aggregation: weekly - redis_slot: package - name: i_package_npm_user aggregation: weekly - redis_slot: package - name: i_package_nuget_deploy_token aggregation: weekly - redis_slot: package - name: i_package_nuget_user aggregation: weekly - redis_slot: package - name: i_package_pypi_deploy_token aggregation: weekly - redis_slot: package - name: i_package_pypi_user aggregation: weekly - redis_slot: package - name: i_package_rubygems_deploy_token aggregation: weekly - redis_slot: package - name: i_package_rubygems_user aggregation: weekly - redis_slot: package - name: i_package_terraform_module_deploy_token aggregation: weekly - redis_slot: package - name: i_package_terraform_module_user aggregation: weekly - redis_slot: package - name: i_package_rpm_user aggregation: weekly - redis_slot: package - name: i_package_rpm_deploy_token aggregation: weekly - redis_slot: package diff --git a/lib/gitlab/usage_data_counters/known_events/quickactions.yml b/lib/gitlab/usage_data_counters/known_events/quickactions.yml index 306ed79ea23..ee5fa29c0c3 100644 --- a/lib/gitlab/usage_data_counters/known_events/quickactions.yml +++ b/lib/gitlab/usage_data_counters/known_events/quickactions.yml @@ -1,190 +1,129 @@ --- - name: i_quickactions_assign_multiple - redis_slot: quickactions aggregation: weekly - name: i_quickactions_approve - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unapprove - redis_slot: quickactions aggregation: weekly - name: i_quickactions_assign_single - redis_slot: quickactions aggregation: weekly - name: i_quickactions_assign_self - redis_slot: quickactions aggregation: weekly - name: i_quickactions_assign_reviewer - redis_slot: quickactions aggregation: weekly - name: i_quickactions_award - redis_slot: quickactions aggregation: 
weekly - name: i_quickactions_board_move - redis_slot: quickactions aggregation: weekly - name: i_quickactions_clone - redis_slot: quickactions aggregation: weekly - name: i_quickactions_close - redis_slot: quickactions aggregation: weekly - name: i_quickactions_confidential - redis_slot: quickactions aggregation: weekly - name: i_quickactions_copy_metadata_merge_request - redis_slot: quickactions aggregation: weekly - name: i_quickactions_copy_metadata_issue - redis_slot: quickactions aggregation: weekly - name: i_quickactions_create_merge_request - redis_slot: quickactions aggregation: weekly - name: i_quickactions_done - redis_slot: quickactions aggregation: weekly - name: i_quickactions_draft - redis_slot: quickactions aggregation: weekly - name: i_quickactions_due - redis_slot: quickactions aggregation: weekly - name: i_quickactions_duplicate - redis_slot: quickactions aggregation: weekly - name: i_quickactions_estimate - redis_slot: quickactions aggregation: weekly - name: i_quickactions_label - redis_slot: quickactions aggregation: weekly - name: i_quickactions_lock - redis_slot: quickactions aggregation: weekly - name: i_quickactions_merge - redis_slot: quickactions aggregation: weekly - name: i_quickactions_milestone - redis_slot: quickactions aggregation: weekly - name: i_quickactions_move - redis_slot: quickactions aggregation: weekly - name: i_quickactions_promote_to_incident - redis_slot: quickactions aggregation: weekly - name: i_quickactions_timeline - redis_slot: quickactions aggregation: weekly - name: i_quickactions_ready - redis_slot: quickactions aggregation: weekly - name: i_quickactions_reassign - redis_slot: quickactions aggregation: weekly - name: i_quickactions_reassign_reviewer - redis_slot: quickactions aggregation: weekly - name: i_quickactions_rebase - redis_slot: quickactions aggregation: weekly - name: i_quickactions_relabel - redis_slot: quickactions aggregation: weekly - name: i_quickactions_relate - redis_slot: quickactions 
aggregation: weekly - name: i_quickactions_remove_due_date - redis_slot: quickactions aggregation: weekly - name: i_quickactions_remove_estimate - redis_slot: quickactions aggregation: weekly - name: i_quickactions_remove_milestone - redis_slot: quickactions aggregation: weekly - name: i_quickactions_remove_time_spent - redis_slot: quickactions aggregation: weekly - name: i_quickactions_remove_zoom - redis_slot: quickactions aggregation: weekly - name: i_quickactions_reopen - redis_slot: quickactions aggregation: weekly - name: i_quickactions_severity - redis_slot: quickactions aggregation: weekly - name: i_quickactions_shrug - redis_slot: quickactions aggregation: weekly - name: i_quickactions_spend_subtract - redis_slot: quickactions aggregation: weekly - name: i_quickactions_spend_add - redis_slot: quickactions aggregation: weekly - name: i_quickactions_submit_review - redis_slot: quickactions aggregation: weekly - name: i_quickactions_subscribe - redis_slot: quickactions + aggregation: weekly +- name: i_quickactions_summarize_diff aggregation: weekly - name: i_quickactions_tableflip - redis_slot: quickactions aggregation: weekly - name: i_quickactions_tag - redis_slot: quickactions aggregation: weekly - name: i_quickactions_target_branch - redis_slot: quickactions aggregation: weekly - name: i_quickactions_title - redis_slot: quickactions aggregation: weekly - name: i_quickactions_todo - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unassign_specific - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unassign_all - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unassign_reviewer - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unlabel_specific - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unlabel_all - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unlock - redis_slot: quickactions aggregation: weekly - name: i_quickactions_unsubscribe 
- redis_slot: quickactions aggregation: weekly - name: i_quickactions_wip - redis_slot: quickactions aggregation: weekly - name: i_quickactions_zoom - redis_slot: quickactions aggregation: weekly - name: i_quickactions_link - redis_slot: quickactions aggregation: weekly - name: i_quickactions_invite_email_single - redis_slot: quickactions aggregation: weekly - name: i_quickactions_invite_email_multiple - redis_slot: quickactions aggregation: weekly - name: i_quickactions_add_contacts - redis_slot: quickactions aggregation: weekly - name: i_quickactions_remove_contacts - redis_slot: quickactions aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/work_items.yml b/lib/gitlab/usage_data_counters/known_events/work_items.yml index 1f0cc0c8a2e..a6e5b9e1af5 100644 --- a/lib/gitlab/usage_data_counters/known_events/work_items.yml +++ b/lib/gitlab/usage_data_counters/known_events/work_items.yml @@ -1,28 +1,21 @@ --- - name: users_updating_work_item_title - redis_slot: users aggregation: weekly - name: users_creating_work_items - redis_slot: users aggregation: weekly - name: users_updating_work_item_dates - redis_slot: users aggregation: weekly - name: users_updating_work_item_labels - redis_slot: users aggregation: weekly - name: users_updating_work_item_milestone - redis_slot: users aggregation: weekly - name: users_updating_work_item_iteration # The event tracks an EE feature. # It's added here so it can be aggregated into the CE/EE 'OR' aggregate metrics. # It will report 0 for CE instances and should not be used with 'AND' aggregators. - redis_slot: users aggregation: weekly - name: users_updating_weight_estimate # The event tracks an EE feature. # It's added here so it can be aggregated into the CE/EE 'OR' aggregate metrics. # It will report 0 for CE instances and should not be used with 'AND' aggregators. 
- redis_slot: users aggregation: weekly diff --git a/lib/gitlab/utils/error_message.rb b/lib/gitlab/utils/error_message.rb index e9c6f8a5847..72b69fb078f 100644 --- a/lib/gitlab/utils/error_message.rb +++ b/lib/gitlab/utils/error_message.rb @@ -5,8 +5,14 @@ module Gitlab module ErrorMessage extend self + UF_ERROR_PREFIX = 'UF' + def to_user_facing(message) - "UF: #{message}" + prefixed_error_message(message, UF_ERROR_PREFIX) + end + + def prefixed_error_message(message, prefix) + "#{prefix}: #{message}" end end end diff --git a/lib/gitlab/utils/strong_memoize.rb b/lib/gitlab/utils/strong_memoize.rb index eb44b7ddd95..2b3841b8f09 100644 --- a/lib/gitlab/utils/strong_memoize.rb +++ b/lib/gitlab/utils/strong_memoize.rb @@ -35,6 +35,27 @@ module Gitlab end end + # Works the same way as "strong_memoize" but takes + # a second argument - expire_in. This allows invalidate + # the data after specified number of seconds + def strong_memoize_with_expiration(name, expire_in) + key = ivar(name) + expiration_key = "#{key}_expired_at" + + if instance_variable_defined?(expiration_key) + expire_at = instance_variable_get(expiration_key) + clear_memoization(name) if Time.current > expire_at + end + + if instance_variable_defined?(key) + instance_variable_get(key) + else + value = instance_variable_set(key, yield) + instance_variable_set(expiration_key, Time.current + expire_in) + value + end + end + def strong_memoize_with(name, *args) container = strong_memoize(name) { {} } |