Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary refs log tree commit diff
diff options
context:
space:
mode:
Diffstat (limited to 'lib/gitlab/hashed_storage')
-rw-r--r--  lib/gitlab/hashed_storage/migrator.rb  125
-rw-r--r--  lib/gitlab/hashed_storage/rake_helper.rb  129
2 files changed, 0 insertions, 254 deletions
diff --git a/lib/gitlab/hashed_storage/migrator.rb b/lib/gitlab/hashed_storage/migrator.rb
deleted file mode 100644
index 912e2ee99e9..00000000000
--- a/lib/gitlab/hashed_storage/migrator.rb
+++ /dev/null
@@ -1,125 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module HashedStorage
- # Hashed Storage Migrator
- #
- # This is responsible for scheduling and flagging projects
- # to be migrated from Legacy to Hashed storage, either one by one or in bulk.
- class Migrator
- BATCH_SIZE = 100
-
- # Schedule a range of projects to be bulk migrated with #bulk_migrate asynchronously
- #
- # @param [Integer] start first project id for the range
- # @param [Integer] finish last project id for the range
- def bulk_schedule_migration(start:, finish:)
- ::HashedStorage::MigratorWorker.perform_async(start, finish)
- end
-
- # Schedule a range of projects to be bulk rolledback with #bulk_rollback asynchronously
- #
- # @param [Integer] start first project id for the range
- # @param [Integer] finish last project id for the range
- def bulk_schedule_rollback(start:, finish:)
- ::HashedStorage::RollbackerWorker.perform_async(start, finish)
- end
-
- # Start migration of projects from specified range
- #
- # Flagging a project to be migrated is a synchronous action
- # but the migration runs through async jobs
- #
- # @param [Integer] start first project id for the range
- # @param [Integer] finish last project id for the range
- # rubocop: disable CodeReuse/ActiveRecord
- def bulk_migrate(start:, finish:)
- projects = build_relation(start, finish)
-
- projects.with_route.find_each(batch_size: BATCH_SIZE) do |project|
- migrate(project)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # Start rollback of projects from specified range
- #
- # Flagging a project to be rolled back is a synchronous action
- # but the rollback runs through async jobs
- #
- # @param [Integer] start first project id for the range
- # @param [Integer] finish last project id for the range
- # rubocop: disable CodeReuse/ActiveRecord
- def bulk_rollback(start:, finish:)
- projects = build_relation(start, finish)
-
- projects.with_route.find_each(batch_size: BATCH_SIZE) do |project|
- rollback(project)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # Flag a project to be migrated to Hashed Storage
- #
- # @param [Project] project that will be migrated
- def migrate(project)
- Gitlab::AppLogger.info "Starting storage migration of #{project.full_path} (ID=#{project.id})..."
-
- project.migrate_to_hashed_storage!
- rescue StandardError => err
- Gitlab::AppLogger.error("#{err.message} migrating storage of #{project.full_path} (ID=#{project.id}), trace - #{err.backtrace}")
- end
-
- # Flag a project to be rolled-back to Legacy Storage
- #
- # @param [Project] project that will be rolled-back
- def rollback(project)
- Gitlab::AppLogger.info "Starting storage rollback of #{project.full_path} (ID=#{project.id})..."
-
- project.rollback_to_legacy_storage!
- rescue StandardError => err
- Gitlab::AppLogger.error("#{err.message} rolling-back storage of #{project.full_path} (ID=#{project.id}), trace - #{err.backtrace}")
- end
-
- # Returns whether we have any pending storage migration
- #
- def migration_pending?
- any_non_empty_queue?(::HashedStorage::MigratorWorker, ::HashedStorage::ProjectMigrateWorker)
- end
-
- # Returns whether we have any pending storage rollback
- #
- def rollback_pending?
- any_non_empty_queue?(::HashedStorage::RollbackerWorker, ::HashedStorage::ProjectRollbackWorker)
- end
-
- # Remove all remaining scheduled rollback operations
- #
- def abort_rollback!
- [::HashedStorage::RollbackerWorker, ::HashedStorage::ProjectRollbackWorker].each do |worker|
- Sidekiq::Queue.new(worker.queue).clear
- end
- end
-
- private
-
- def any_non_empty_queue?(*workers)
- workers.any? do |worker|
- Sidekiq::Queue.new(worker.queue).size != 0 # rubocop:disable Style/ZeroLengthPredicate
- end
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def build_relation(start, finish)
- relation = Project
- table = Project.arel_table
-
- relation = relation.where(table[:id].gteq(start)) if start
- relation = relation.where(table[:id].lteq(finish)) if finish
-
- relation
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
- end
-end
diff --git a/lib/gitlab/hashed_storage/rake_helper.rb b/lib/gitlab/hashed_storage/rake_helper.rb
deleted file mode 100644
index d3468569e5e..00000000000
--- a/lib/gitlab/hashed_storage/rake_helper.rb
+++ /dev/null
@@ -1,129 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module HashedStorage
- module RakeHelper
- def self.batch_size
- ENV.fetch('BATCH', 200).to_i
- end
-
- def self.listing_limit
- ENV.fetch('LIMIT', 500).to_i
- end
-
- def self.range_from
- ENV['ID_FROM']
- end
-
- def self.range_to
- ENV['ID_TO']
- end
-
- def self.using_ranges?
- !range_from.nil? && !range_to.nil?
- end
-
- def self.range_single_item?
- using_ranges? && range_from == range_to
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def self.project_id_batches_migration(&block)
- Project.with_unmigrated_storage.in_batches(of: batch_size, start: range_from, finish: range_to) do |relation| # rubocop: disable Cop/InBatches
- ids = relation.pluck(:id)
-
- yield ids.min, ids.max
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def self.project_id_batches_rollback(&block)
- Project.with_storage_feature(:repository).in_batches(of: batch_size, start: range_from, finish: range_to) do |relation| # rubocop: disable Cop/InBatches
- ids = relation.pluck(:id)
-
- yield ids.min, ids.max
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def self.legacy_attachments_relation
- Upload.inner_join_local_uploads_projects.merge(Project.without_storage_feature(:attachments))
- end
-
- def self.hashed_attachments_relation
- Upload.inner_join_local_uploads_projects.merge(Project.with_storage_feature(:attachments))
- end
-
- def self.relation_summary(relation_name, relation)
- relation_count = relation.count
- $stdout.puts "* Found #{relation_count} #{relation_name}".color(:green)
-
- relation_count
- end
-
- def self.projects_list(relation_name, relation)
- listing(relation_name, relation.with_route) do |project|
- $stdout.puts " - #{project.full_path} (id: #{project.id})".color(:red)
- $stdout.puts " #{project.repository.disk_path}"
- end
- end
-
- def self.attachments_list(relation_name, relation)
- listing(relation_name, relation) do |upload|
- $stdout.puts " - #{upload.path} (id: #{upload.id})".color(:red)
- end
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def self.listing(relation_name, relation)
- relation_count = relation_summary(relation_name, relation)
- return unless relation_count > 0
-
- limit = listing_limit
-
- if relation_count > limit
- $stdout.puts " ! Displaying first #{limit} #{relation_name}..."
- end
-
- relation.find_each(batch_size: batch_size).with_index do |element, index|
- yield element
-
- break if index + 1 >= limit
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def self.prune(relation_name, relation, dry_run: true, root: nil)
- root ||= '../repositories'
-
- known_paths = Set.new
- listing(relation_name, relation) { |p| known_paths << "#{root}/#{p.repository.disk_path}" }
-
- marked_for_deletion = Set.new(Dir["#{root}/@hashed/*/*/*"])
- marked_for_deletion.reject! do |path|
- base = path.gsub(/\.(\w+\.)?git$/, '')
- known_paths.include?(base)
- end
-
- if marked_for_deletion.empty?
- $stdout.puts "No orphaned directories found. Nothing to do!"
- else
- n = marked_for_deletion.size
- $stdout.puts "Found #{n} orphaned #{'directory'.pluralize(n)}"
- $stdout.puts "Dry run. (Run again with FORCE=1 to delete). We would have deleted:" if dry_run
- end
-
- marked_for_deletion.each do |p|
- p = Pathname.new(p)
- if dry_run
- $stdout.puts " - #{p}"
- else
- $stdout.puts "Removing #{p}"
- p.rmtree
- end
- end
- end
- end
- end
-end