gitlab.com/gitlab-org/gitlab-foss.git

author    GitLab Bot <gitlab-bot@gitlab.com>  2022-04-20 13:00:54 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-04-20 13:00:54 +0300
commit    3cccd102ba543e02725d247893729e5c73b38295 (patch)
tree      f36a04ec38517f5deaaacb5acc7d949688d1e187 /lib/gitlab/background_migration
parent    205943281328046ef7b4528031b90fbda70c75ac (diff)
Add latest changes from gitlab-org/gitlab@14-10-stable-ee (tag: v14.10.0-rc42)
Diffstat (limited to 'lib/gitlab/background_migration')
-rw-r--r--  lib/gitlab/background_migration/backfill_draft_status_on_merge_requests.rb |  2
-rw-r--r--  lib/gitlab/background_migration/backfill_group_features.rb | 47
-rw-r--r--  lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses.rb | 32
-rw-r--r--  lib/gitlab/background_migration/backfill_namespace_id_for_project_route.rb | 58
-rw-r--r--  lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb | 73
-rw-r--r--  lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy.rb | 19
-rw-r--r--  lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb | 17
-rw-r--r--  lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex.rb | 48
-rw-r--r--  lib/gitlab/background_migration/encrypt_static_object_token.rb |  4
-rw-r--r--  lib/gitlab/background_migration/fix_duplicate_project_name_and_path.rb | 82
-rw-r--r--  lib/gitlab/background_migration/merge_topics_with_same_name.rb | 76
-rw-r--r--  lib/gitlab/background_migration/migrate_shimo_confluence_integration_category.rb | 27
-rw-r--r--  lib/gitlab/background_migration/populate_container_repository_migration_plan.rb | 51
-rw-r--r--  lib/gitlab/background_migration/populate_namespace_statistics.rb | 33
-rw-r--r--  lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb | 12
15 files changed, 538 insertions(+), 43 deletions(-)
diff --git a/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests.rb b/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests.rb
index b0a8c3a8cbb..52ff3aaa423 100644
--- a/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests.rb
+++ b/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests.rb
@@ -22,8 +22,6 @@ module Gitlab
def perform(start_id, end_id)
eligible_mrs = MergeRequest.eligible.where(id: start_id..end_id).pluck(:id)
- return if eligible_mrs.empty?
-
eligible_mrs.each_slice(10) do |slice|
MergeRequest.where(id: slice).update_all(draft: true)
end
diff --git a/lib/gitlab/background_migration/backfill_group_features.rb b/lib/gitlab/background_migration/backfill_group_features.rb
new file mode 100644
index 00000000000..084c788c8cb
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_group_features.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfill group_features for an array of groups
+ class BackfillGroupFeatures < ::Gitlab::BackgroundMigration::BaseJob
+ include Gitlab::Database::DynamicModelHelpers
+
+ def perform(start_id, end_id, batch_table, batch_column, sub_batch_size, pause_ms, batch_size)
+ pause_ms = 0 if pause_ms < 0
+
+ parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id)
+ parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size, order_hint: :type) do |sub_batch|
+ batch_metrics.time_operation(:upsert_group_features) do
+ upsert_group_features(sub_batch, batch_size)
+ end
+
+ sleep(pause_ms * 0.001)
+ end
+ end
+
+ def batch_metrics
+ @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
+ end
+
+ private
+
+ def relation_scoped_to_range(source_table, source_key_column, start_id, stop_id)
+ define_batchable_model(source_table, connection: connection)
+ .where(source_key_column => start_id..stop_id)
+ .where(type: 'Group')
+ end
+
+ def upsert_group_features(relation, batch_size)
+ connection.execute(
+ <<~SQL
+ INSERT INTO group_features (group_id, created_at, updated_at)
+ SELECT namespaces.id as group_id, now(), now()
+ FROM namespaces
+ WHERE namespaces.type = 'Group' AND namespaces.id IN(#{relation.select(:id).limit(batch_size).to_sql})
+ ON CONFLICT (group_id) DO NOTHING;
+ SQL
+ )
+ end
+ end
+ end
+end
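
Note: BackfillGroupFeatures illustrates the sub-batching pattern used throughout these jobs: scope a key range, process it in small slices, upsert idempotently, and pause between slices. Below is a distilled, standalone sketch of that pattern in plain ActiveRecord; it is not the GitLab job itself, and it assumes a PostgreSQL database reachable via DATABASE_URL that has namespaces and group_features tables.

    # Distilled sketch of the sub-batch + pause + idempotent upsert pattern
    # (illustrative only; assumes PostgreSQL with namespaces/group_features tables).
    require "active_record"

    ActiveRecord::Base.establish_connection(ENV.fetch("DATABASE_URL"))

    class Namespace < ActiveRecord::Base
      self.table_name = "namespaces"
    end

    def backfill_group_features(start_id, end_id, sub_batch_size: 100, pause_ms: 100)
      Namespace.where(type: "Group", id: start_id..end_id)
               .in_batches(of: sub_batch_size) do |sub_batch|
        ActiveRecord::Base.connection.execute(<<~SQL)
          INSERT INTO group_features (group_id, created_at, updated_at)
          SELECT namespaces.id, now(), now()
          FROM namespaces
          WHERE namespaces.type = 'Group'
            AND namespaces.id IN (#{sub_batch.select(:id).to_sql})
          ON CONFLICT (group_id) DO NOTHING;
        SQL

        sleep(pause_ms / 1000.0) # throttle between sub-batches
      end
    end
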
diff --git a/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses.rb b/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses.rb
deleted file mode 100644
index 2d46ff6b933..00000000000
--- a/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # BackfillIncidentIssueEscalationStatuses adds
- # IncidentManagement::IssuableEscalationStatus records for existing Incident issues.
- # They will be added with no policy, and escalations_started_at as nil.
- class BackfillIncidentIssueEscalationStatuses
- def perform(start_id, stop_id)
- ActiveRecord::Base.connection.execute <<~SQL
- INSERT INTO incident_management_issuable_escalation_statuses (issue_id, created_at, updated_at)
- SELECT issues.id, current_timestamp, current_timestamp
- FROM issues
- WHERE issues.issue_type = 1
- AND issues.id BETWEEN #{start_id} AND #{stop_id}
- ON CONFLICT (issue_id) DO NOTHING;
- SQL
-
- mark_job_as_succeeded(start_id, stop_id)
- end
-
- private
-
- def mark_job_as_succeeded(*arguments)
- ::Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
- self.class.name.demodulize,
- arguments
- )
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/backfill_namespace_id_for_project_route.rb b/lib/gitlab/background_migration/backfill_namespace_id_for_project_route.rb
new file mode 100644
index 00000000000..1f0d606f001
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_namespace_id_for_project_route.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfills the `routes.namespace_id` column, by setting it to project.project_namespace_id
+ class BackfillNamespaceIdForProjectRoute
+ include Gitlab::Database::DynamicModelHelpers
+
+ def perform(start_id, end_id, batch_table, batch_column, sub_batch_size, pause_ms)
+ parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id)
+
+ parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size) do |sub_batch|
+ cleanup_gin_index('routes')
+
+ batch_metrics.time_operation(:update_all) do
+ ActiveRecord::Base.connection.execute <<~SQL
+ WITH route_and_ns(route_id, project_namespace_id) AS #{::Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
+ #{sub_batch.to_sql}
+ )
+ UPDATE routes
+ SET namespace_id = route_and_ns.project_namespace_id
+ FROM route_and_ns
+ WHERE id = route_and_ns.route_id
+ SQL
+ end
+
+ pause_ms = [0, pause_ms].max
+ sleep(pause_ms * 0.001)
+ end
+ end
+
+ def batch_metrics
+ @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
+ end
+
+ private
+
+ def cleanup_gin_index(table_name)
+ sql = "select indexname::text from pg_indexes where tablename = '#{table_name}' and indexdef ilike '%gin%'"
+ index_names = ActiveRecord::Base.connection.select_values(sql)
+
+ index_names.each do |index_name|
+ ActiveRecord::Base.connection.execute("select gin_clean_pending_list('#{index_name}')")
+ end
+ end
+
+ def relation_scoped_to_range(source_table, source_key_column, start_id, stop_id)
+ define_batchable_model(source_table, connection: ActiveRecord::Base.connection)
+ .joins('INNER JOIN projects ON routes.source_id = projects.id')
+ .where(source_key_column => start_id..stop_id)
+ .where(namespace_id: nil)
+ .where(source_type: 'Project')
+ .where.not(projects: { project_namespace_id: nil })
+ .select("routes.id, projects.project_namespace_id")
+ end
+ end
+ end
+end
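
Note: BackfillNamespaceIdForProjectRoute pairs two techniques: flushing the pending lists of GIN indexes on routes before each sub-batch (PostgreSQL's gin_clean_pending_list), and copying projects.project_namespace_id into routes.namespace_id with a single UPDATE ... FROM over a CTE. A minimal sketch of both steps follows; the ID range is illustrative and an ActiveRecord connection to the same database is assumed.

    # Sketch of the two SQL steps performed per sub-batch (ID range illustrative).
    conn = ActiveRecord::Base.connection

    # 1. Flush the pending list of every GIN index on routes so the UPDATE below
    #    is not slowed down by accumulated pending entries.
    gin_indexes = conn.select_values(
      "SELECT indexname FROM pg_indexes WHERE tablename = 'routes' AND indexdef ILIKE '%gin%'"
    )
    gin_indexes.each do |index_name|
      conn.execute("SELECT gin_clean_pending_list('#{conn.quote_string(index_name)}')")
    end

    # 2. Backfill routes.namespace_id from the owning project's namespace.
    conn.execute(<<~SQL)
      WITH route_and_ns(route_id, project_namespace_id) AS (
        SELECT routes.id, projects.project_namespace_id
        FROM routes
        INNER JOIN projects ON routes.source_id = projects.id
        WHERE routes.source_type = 'Project'
          AND routes.namespace_id IS NULL
          AND projects.project_namespace_id IS NOT NULL
          AND routes.id BETWEEN 1 AND 1000
      )
      UPDATE routes
      SET namespace_id = route_and_ns.project_namespace_id
      FROM route_and_ns
      WHERE routes.id = route_and_ns.route_id
    SQL
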
diff --git a/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb b/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb
new file mode 100644
index 00000000000..a16efa4222b
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfills the `issues.work_item_type_id` column, replacing any
+ # instances of `NULL` with the appropriate `work_item_types.id` based on `issues.issue_type`
+ class BackfillWorkItemTypeIdForIssues
+ # Basic AR model for issues table
+ class MigrationIssue < ApplicationRecord
+ include ::EachBatch
+
+ self.table_name = 'issues'
+
+ scope :base_query, ->(base_type) { where(work_item_type_id: nil, issue_type: base_type) }
+ end
+
+ MAX_UPDATE_RETRIES = 3
+
+ def perform(start_id, end_id, batch_table, batch_column, sub_batch_size, pause_ms, base_type, base_type_id)
+ parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id, base_type)
+
+ parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size) do |sub_batch|
+ first, last = sub_batch.pluck(Arel.sql('min(id), max(id)')).first
+
+ # The query needs to be reconstructed because .each_batch modifies the default scope
+ # See: https://gitlab.com/gitlab-org/gitlab/-/issues/330510
+ reconstructed_sub_batch = MigrationIssue.unscoped.base_query(base_type).where(id: first..last)
+
+ batch_metrics.time_operation(:update_all) do
+ update_with_retry(reconstructed_sub_batch, base_type_id)
+ end
+
+ pause_ms = 0 if pause_ms < 0
+ sleep(pause_ms * 0.001)
+ end
+ end
+
+ def batch_metrics
+ @batch_metrics ||= Gitlab::Database::BackgroundMigration::BatchMetrics.new
+ end
+
+ private
+
+ # Retry mechanism required as update statements on the issues table will randomly take longer than
+ # expected due to gin indexes https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71869#note_775796352
+ def update_with_retry(sub_batch, base_type_id)
+ update_attempt = 1
+
+ begin
+ update_batch(sub_batch, base_type_id)
+ rescue ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled => e
+ update_attempt += 1
+
+ if update_attempt <= MAX_UPDATE_RETRIES
+ # sleeping 30 seconds as it might take a long time to clean the gin index pending list
+ sleep(30)
+ retry
+ end
+
+ raise e
+ end
+ end
+
+ def update_batch(sub_batch, base_type_id)
+ sub_batch.update_all(work_item_type_id: base_type_id)
+ end
+
+ def relation_scoped_to_range(source_table, source_key_column, start_id, end_id, base_type)
+ MigrationIssue.where(source_key_column => start_id..end_id).base_query(base_type)
+ end
+ end
+ end
+end
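
Note: the retry in update_with_retry exists because bulk updates on issues can intermittently exceed the statement timeout while GIN index pending lists are being cleaned. A generic sketch of that retry shape, usable with any relation, is shown here; the relation and attributes in the trailing comment are illustrative.

    # Generic retry-on-timeout sketch mirroring update_with_retry (illustrative).
    MAX_UPDATE_RETRIES = 3

    def update_all_with_retry(relation, attributes)
      attempt = 1

      begin
        relation.update_all(attributes)
      rescue ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled => e
        attempt += 1

        if attempt <= MAX_UPDATE_RETRIES
          sleep(30) # give GIN pending-list cleanup time to finish before retrying
          retry
        end

        raise e
      end
    end

    # e.g. update_all_with_retry(MigrationIssue.where(id: 1..100), work_item_type_id: 1)
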
diff --git a/lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy.rb b/lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy.rb
new file mode 100644
index 00000000000..06036eebcb9
--- /dev/null
+++ b/lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ module BatchingStrategies
+ # Batching class to use for back-filling issue's work_item_type_id for a single issue type.
+ # Batches will be scoped to records where the foreign key is NULL and only of a given issue type
+ #
+ # If no more batches exist in the table, returns nil.
+ class BackfillIssueWorkItemTypeBatchingStrategy < PrimaryKeyBatchingStrategy
+ def apply_additional_filters(relation, job_arguments:)
+ issue_type = job_arguments.first
+
+ relation.where(issue_type: issue_type)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb b/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb
index 5569bac0e19..e7a68b183b8 100644
--- a/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb
+++ b/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb
@@ -23,6 +23,7 @@ module Gitlab
quoted_column_name = model_class.connection.quote_column_name(column_name)
relation = model_class.where("#{quoted_column_name} >= ?", batch_min_value)
+ relation = apply_additional_filters(relation, job_arguments: job_arguments)
next_batch_bounds = nil
relation.each_batch(of: batch_size, column: column_name) do |batch| # rubocop:disable Lint/UnreachableLoop
@@ -33,6 +34,22 @@ module Gitlab
next_batch_bounds
end
+
+ # Strategies based on PrimaryKeyBatchingStrategy can use
+ # this method to easily apply additional filters.
+ #
+ # Example:
+ #
+ # class MatchingType < PrimaryKeyBatchingStrategy
+ # def apply_additional_filters(relation, job_arguments:)
+ # type = job_arguments.first
+ #
+ # relation.where(type: type)
+ # end
+ # end
+ def apply_additional_filters(relation, job_arguments: [])
+ relation
+ end
end
end
end
diff --git a/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex.rb b/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex.rb
new file mode 100644
index 00000000000..b703faf6a6c
--- /dev/null
+++ b/lib/gitlab/background_migration/cleanup_draft_data_from_faulty_regex.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Cleanup draft column data inserted by a faulty regex
+ #
+ class CleanupDraftDataFromFaultyRegex
+ # Migration only version of MergeRequest table
+ ##
+ class MergeRequest < ActiveRecord::Base
+ LEAKY_REGEXP_STR = "^\\[draft\\]|\\(draft\\)|draft:|draft|\\[WIP\\]|WIP:|WIP"
+ CORRECTED_REGEXP_STR = "^(\\[draft\\]|\\(draft\\)|draft:|draft|\\[WIP\\]|WIP:|WIP)"
+
+ include EachBatch
+
+ self.table_name = 'merge_requests'
+
+ def self.eligible
+ where(state_id: 1)
+ .where(draft: true)
+ .where("title ~* ?", LEAKY_REGEXP_STR)
+ .where("title !~* ?", CORRECTED_REGEXP_STR)
+ end
+ end
+
+ def perform(start_id, end_id)
+ eligible_mrs = MergeRequest.eligible.where(id: start_id..end_id).pluck(:id)
+
+ return if eligible_mrs.empty?
+
+ eligible_mrs.each_slice(10) do |slice|
+ MergeRequest.where(id: slice).update_all(draft: false)
+ end
+
+ mark_job_as_succeeded(start_id, end_id)
+ end
+
+ private
+
+ def mark_job_as_succeeded(*arguments)
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ 'CleanupDraftDataFromFaultyRegex',
+ arguments
+ )
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/encrypt_static_object_token.rb b/lib/gitlab/background_migration/encrypt_static_object_token.rb
index 80931353e2f..a087d2529eb 100644
--- a/lib/gitlab/background_migration/encrypt_static_object_token.rb
+++ b/lib/gitlab/background_migration/encrypt_static_object_token.rb
@@ -52,9 +52,9 @@ module Gitlab
WHERE cte_id = id
SQL
end
-
- mark_job_as_succeeded(start_id, end_id)
end
+
+ mark_job_as_succeeded(start_id, end_id)
end
private
diff --git a/lib/gitlab/background_migration/fix_duplicate_project_name_and_path.rb b/lib/gitlab/background_migration/fix_duplicate_project_name_and_path.rb
new file mode 100644
index 00000000000..defd9ea832b
--- /dev/null
+++ b/lib/gitlab/background_migration/fix_duplicate_project_name_and_path.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Fix project name duplicates and backfill missing project namespace ids
+ class FixDuplicateProjectNameAndPath
+ SUB_BATCH_SIZE = 10
+ # isolated project active record
+ class Project < ActiveRecord::Base
+ include ::EachBatch
+
+ self.table_name = 'projects'
+
+ scope :without_project_namespace, -> { where(project_namespace_id: nil) }
+ scope :id_in, ->(ids) { where(id: ids) }
+ end
+
+ def perform(start_id, end_id)
+ @project_ids = fetch_project_ids(start_id, end_id)
+ backfill_project_namespaces_service = init_backfill_service(project_ids)
+ backfill_project_namespaces_service.cleanup_gin_index('projects')
+
+ project_ids.each_slice(SUB_BATCH_SIZE) do |ids|
+ ActiveRecord::Base.connection.execute(update_projects_name_and_path_sql(ids))
+ end
+
+ backfill_project_namespaces_service.backfill_project_namespaces
+
+ mark_job_as_succeeded(start_id, end_id)
+ end
+
+ private
+
+ attr_accessor :project_ids
+
+ def fetch_project_ids(start_id, end_id)
+ Project.without_project_namespace.where(id: start_id..end_id)
+ end
+
+ def init_backfill_service(project_ids)
+ service = Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces.new
+ service.project_ids = project_ids
+ service.sub_batch_size = SUB_BATCH_SIZE
+
+ service
+ end
+
+ def update_projects_name_and_path_sql(project_ids)
+ <<~SQL
+ WITH cte (project_id, path_from_route ) AS (
+ #{path_from_route_sql(project_ids).to_sql}
+ )
+ UPDATE
+ projects
+ SET
+ name = concat(projects.name, '-', id),
+ path = CASE
+ WHEN projects.path <> cte.path_from_route THEN path_from_route
+ ELSE projects.path
+ END
+ FROM
+ cte
+ WHERE
+ projects.id = cte.project_id;
+ SQL
+ end
+
+ def path_from_route_sql(project_ids)
+ Project.without_project_namespace.id_in(project_ids)
+ .joins("INNER JOIN routes ON routes.source_id = projects.id AND routes.source_type = 'Project'")
+ .select("projects.id, SUBSTRING(routes.path FROM '[^/]+(?=/$|$)') AS path_from_route")
+ end
+
+ def mark_job_as_succeeded(*arguments)
+ ::Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ 'FixDuplicateProjectNameAndPath',
+ arguments
+ )
+ end
+ end
+ end
+end
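
Note: path_from_route_sql relies on a PostgreSQL regular expression to pull the last segment out of routes.path. A quick, illustrative way to check what that SUBSTRING expression yields for a sample path:

    # Quick check of the SUBSTRING expression used above (sample path illustrative).
    ActiveRecord::Base.connection.select_value(
      "SELECT SUBSTRING('group/subgroup/my-project' FROM '[^/]+(?=/$|$)')"
    )
    # expected to return the final path segment, e.g. "my-project"
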
diff --git a/lib/gitlab/background_migration/merge_topics_with_same_name.rb b/lib/gitlab/background_migration/merge_topics_with_same_name.rb
new file mode 100644
index 00000000000..07231098a5f
--- /dev/null
+++ b/lib/gitlab/background_migration/merge_topics_with_same_name.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # The class to merge project topics with the same case insensitive name
+ class MergeTopicsWithSameName
+ # Temporary AR model for topics
+ class Topic < ActiveRecord::Base
+ self.table_name = 'topics'
+ end
+
+ # Temporary AR model for project topic assignment
+ class ProjectTopic < ActiveRecord::Base
+ self.table_name = 'project_topics'
+ end
+
+ def perform(topic_names)
+ topic_names.each do |topic_name|
+ topics = Topic.where('LOWER(name) = ?', topic_name)
+ .order(total_projects_count: :desc, non_private_projects_count: :desc, id: :asc)
+ .to_a
+ topic_to_keep = topics.shift
+ merge_topics(topic_to_keep, topics) if topics.any?
+ end
+ end
+
+ private
+
+ def merge_topics(topic_to_keep, topics_to_remove)
+ description = topic_to_keep.description
+
+ topics_to_remove.each do |topic|
+ description ||= topic.description if topic.description.present?
+ process_avatar(topic_to_keep, topic) if topic.avatar.present?
+
+ ProjectTopic.transaction do
+ ProjectTopic.where(topic_id: topic.id)
+ .where.not(project_id: ProjectTopic.where(topic_id: topic_to_keep).select(:project_id))
+ .update_all(topic_id: topic_to_keep.id)
+ ProjectTopic.where(topic_id: topic.id).delete_all
+ end
+ end
+
+ Topic.where(id: topics_to_remove).delete_all
+
+ topic_to_keep.update(
+ description: description,
+ total_projects_count: total_projects_count(topic_to_keep.id),
+ non_private_projects_count: non_private_projects_count(topic_to_keep.id)
+ )
+ end
+
+ # We intentionally use application code here because we need to copy/remove avatar files
+ def process_avatar(topic_to_keep, topic_to_remove)
+ topic_to_remove = ::Projects::Topic.find(topic_to_remove.id)
+ topic_to_keep = ::Projects::Topic.find(topic_to_keep.id)
+ unless topic_to_keep.avatar.present?
+ topic_to_keep.avatar = topic_to_remove.avatar
+ topic_to_keep.save!
+ end
+
+ topic_to_remove.remove_avatar!
+ topic_to_remove.save!
+ end
+
+ def total_projects_count(topic_id)
+ ProjectTopic.where(topic_id: topic_id).count
+ end
+
+ def non_private_projects_count(topic_id)
+ ProjectTopic.joins('INNER JOIN projects ON project_topics.project_id = projects.id')
+ .where(project_topics: { topic_id: topic_id }).where('projects.visibility_level in (10, 20)').count
+ end
+ end
+ end
+end
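
Note: MergeTopicsWithSameName#perform takes an array of lowercase topic names; for each name it keeps the topic with the highest total_projects_count (ties broken by non_private_projects_count, then by the lowest id) and folds the duplicates into it. An illustrative invocation:

    # Illustrative invocation with a handful of lowercase topic names.
    Gitlab::BackgroundMigration::MergeTopicsWithSameName.new.perform(%w[ruby gitlab ci])
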
diff --git a/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category.rb b/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category.rb
new file mode 100644
index 00000000000..ec4631d1e34
--- /dev/null
+++ b/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # The class to migrate the category of Confluence and Shimo integrations to third_party_wiki
+ class MigrateShimoConfluenceIntegrationCategory
+ include Gitlab::Database::DynamicModelHelpers
+
+ def perform(start_id, end_id)
+ define_batchable_model('integrations', connection: ::ActiveRecord::Base.connection)
+ .where(id: start_id..end_id, type_new: %w[Integrations::Confluence Integrations::Shimo])
+ .update_all(category: 'third_party_wiki')
+
+ mark_job_as_succeeded(start_id, end_id)
+ end
+
+ private
+
+ def mark_job_as_succeeded(*arguments)
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ self.class.name.demodulize,
+ arguments
+ )
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/populate_container_repository_migration_plan.rb b/lib/gitlab/background_migration/populate_container_repository_migration_plan.rb
new file mode 100644
index 00000000000..9e102ea1517
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_container_repository_migration_plan.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # The class to populate the migration_plan column of container_repositories
+ # with the current plan of the namespace that owns the container_repository
+ #
+ # The plan can be NULL, in which case no UPDATE
+ # will be executed.
+ class PopulateContainerRepositoryMigrationPlan
+ def perform(start_id, end_id)
+ (start_id..end_id).each do |id|
+ execute(<<~SQL)
+ WITH selected_plan AS (
+ SELECT "plans"."name"
+ FROM "container_repositories"
+ INNER JOIN "projects" ON "projects"."id" = "container_repositories"."project_id"
+ INNER JOIN "namespaces" ON "namespaces"."id" = "projects"."namespace_id"
+ INNER JOIN "gitlab_subscriptions" ON "gitlab_subscriptions"."namespace_id" = "namespaces"."traversal_ids"[1]
+ INNER JOIN "plans" ON "plans"."id" = "gitlab_subscriptions"."hosted_plan_id"
+ WHERE "container_repositories"."id" = #{id}
+ )
+ UPDATE container_repositories
+ SET migration_plan = selected_plan.name
+ FROM selected_plan
+ WHERE container_repositories.id = #{id};
+ SQL
+ end
+
+ mark_job_as_succeeded(start_id, end_id)
+ end
+
+ private
+
+ def connection
+ @connection ||= ::ActiveRecord::Base.connection
+ end
+
+ def execute(sql)
+ connection.execute(sql)
+ end
+
+ def mark_job_as_succeeded(*arguments)
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
+ self.class.name.demodulize,
+ arguments
+ )
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/populate_namespace_statistics.rb b/lib/gitlab/background_migration/populate_namespace_statistics.rb
index e873ad412f2..97927ef48c2 100644
--- a/lib/gitlab/background_migration/populate_namespace_statistics.rb
+++ b/lib/gitlab/background_migration/populate_namespace_statistics.rb
@@ -5,9 +5,40 @@ module Gitlab
# This class creates/updates those namespace statistics
# that haven't been created nor initialized.
# It also updates the related namespace statistics
- # This is only required in EE
class PopulateNamespaceStatistics
def perform(group_ids, statistics)
+ # Updating group statistics might involve calling Gitaly.
+ # For example, when calculating `wiki_size`, we will need
+ # to perform the request to check if the repo exists and
+ # also the repository size.
+ #
+ # The `allow_n_plus_1_calls` method is only intended for
+ # dev and test; the underlying N+1 check does not raise in production.
+ ::Gitlab::GitalyClient.allow_n_plus_1_calls do
+ relation(group_ids).each do |group|
+ upsert_namespace_statistics(group, statistics)
+ end
+ end
+ end
+
+ private
+
+ def upsert_namespace_statistics(group, statistics)
+ response = ::Groups::UpdateStatisticsService.new(group, statistics: statistics).execute
+
+ error_message("#{response.message} group: #{group.id}") if response.error?
+ end
+
+ def logger
+ @logger ||= ::Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def error_message(message)
+ logger.error(message: "Namespace Statistics Migration: #{message}")
+ end
+
+ def relation(group_ids)
+ Group.includes(:namespace_statistics).where(id: group_ids)
end
end
end
diff --git a/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
index c34cc57ce60..bd7d7d02162 100644
--- a/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
+++ b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
@@ -7,6 +7,8 @@ module Gitlab
#
# rubocop: disable Metrics/ClassLength
class BackfillProjectNamespaces
+ attr_accessor :project_ids, :sub_batch_size
+
SUB_BATCH_SIZE = 25
PROJECT_NAMESPACE_STI_NAME = 'Project'
@@ -18,7 +20,7 @@ module Gitlab
case migration_type
when 'up'
- backfill_project_namespaces(namespace_id)
+ backfill_project_namespaces
mark_job_as_succeeded(start_id, end_id, namespace_id, 'up')
when 'down'
cleanup_backfilled_project_namespaces(namespace_id)
@@ -28,11 +30,7 @@ module Gitlab
end
end
- private
-
- attr_accessor :project_ids, :sub_batch_size
-
- def backfill_project_namespaces(namespace_id)
+ def backfill_project_namespaces
project_ids.each_slice(sub_batch_size) do |project_ids|
# cleanup gin indexes on namespaces table
cleanup_gin_index('namespaces')
@@ -64,6 +62,8 @@ module Gitlab
end
end
+ private
+
def cleanup_backfilled_project_namespaces(namespace_id)
project_ids.each_slice(sub_batch_size) do |project_ids|
# IMPORTANT: first nullify project_namespace_id in projects table to avoid removing projects when records