gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2022-01-20 12:16:11 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-01-20 12:16:11 +0300
commit     edaa33dee2ff2f7ea3fac488d41558eb5f86d68c (patch)
tree       11f143effbfeba52329fb7afbd05e6e2a3790241 /lib/bulk_imports
parent     d8a5691316400a0f7ec4f83832698f1988eb27c1 (diff)

Add latest changes from gitlab-org/gitlab@14-7-stable-ee (tag: v14.7.0-rc42)
Diffstat (limited to 'lib/bulk_imports')
-rw-r--r--  lib/bulk_imports/common/extractors/ndjson_extractor.rb             | 34
-rw-r--r--  lib/bulk_imports/common/pipelines/uploads_pipeline.rb              | 14
-rw-r--r--  lib/bulk_imports/ndjson_pipeline.rb                                 |  2
-rw-r--r--  lib/bulk_imports/projects/pipelines/project_attributes_pipeline.rb | 31
4 files changed, 40 insertions(+), 41 deletions(-)
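
The hunks below rename the pipelines' temporary-directory helper from tmp_dir to tmpdir, pass it to BulkImports::FileDownloadService and BulkImports::FileDecompressionService as a tmpdir: keyword (previously dir:), and guard cleanup behind a Dir.exist? check. A minimal, stdlib-only Ruby sketch of that lifecycle pattern (illustrative class name, not GitLab code):

    require 'fileutils'
    require 'tmpdir'

    # Lazily created, memoized temp dir that is removed only if it still exists,
    # mirroring the tmpdir/after_run pattern used by the pipelines below.
    class TmpdirLifecycle
      def run
        # ... download, decompress and read files under tmpdir here ...
        Dir.glob(File.join(tmpdir, '**', '*'))
      ensure
        cleanup
      end

      private

      def tmpdir
        @tmpdir ||= Dir.mktmpdir('bulk_imports')
      end

      def cleanup
        FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
      end
    end

    TmpdirLifecycle.new.run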
diff --git a/lib/bulk_imports/common/extractors/ndjson_extractor.rb b/lib/bulk_imports/common/extractors/ndjson_extractor.rb
index ecd7c08bd25..04febebff8e 100644
--- a/lib/bulk_imports/common/extractors/ndjson_extractor.rb
+++ b/lib/bulk_imports/common/extractors/ndjson_extractor.rb
@@ -4,49 +4,47 @@ module BulkImports
   module Common
     module Extractors
       class NdjsonExtractor
-        include Gitlab::ImportExport::CommandLineUtil
-        include Gitlab::Utils::StrongMemoize
-
         def initialize(relation:)
           @relation = relation
-          @tmp_dir = Dir.mktmpdir
+          @tmpdir = Dir.mktmpdir
         end

         def extract(context)
-          download_service(tmp_dir, context).execute
-          decompression_service(tmp_dir).execute
-          relations = ndjson_reader(tmp_dir).consume_relation('', relation)
+          download_service(context).execute
+          decompression_service.execute
+
+          records = ndjson_reader.consume_relation('', relation)

-          BulkImports::Pipeline::ExtractedData.new(data: relations)
+          BulkImports::Pipeline::ExtractedData.new(data: records)
         end

-        def remove_tmp_dir
-          FileUtils.remove_entry(tmp_dir)
+        def remove_tmpdir
+          FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
         end

         private

-        attr_reader :relation, :tmp_dir
+        attr_reader :relation, :tmpdir

         def filename
-          @filename ||= "#{relation}.ndjson.gz"
+          "#{relation}.ndjson.gz"
         end

-        def download_service(tmp_dir, context)
+        def download_service(context)
           @download_service ||= BulkImports::FileDownloadService.new(
             configuration: context.configuration,
             relative_url: context.entity.relation_download_url_path(relation),
-            dir: tmp_dir,
+            tmpdir: tmpdir,
             filename: filename
           )
         end

-        def decompression_service(tmp_dir)
-          @decompression_service ||= BulkImports::FileDecompressionService.new(dir: tmp_dir, filename: filename)
+        def decompression_service
+          @decompression_service ||= BulkImports::FileDecompressionService.new(tmpdir: tmpdir, filename: filename)
         end

-        def ndjson_reader(tmp_dir)
-          @ndjson_reader ||= Gitlab::ImportExport::Json::NdjsonReader.new(tmp_dir)
+        def ndjson_reader
+          @ndjson_reader ||= Gitlab::ImportExport::Json::NdjsonReader.new(tmpdir)
         end
       end
     end
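
The extractor above downloads "#{relation}.ndjson.gz" into tmpdir, decompresses it, and has Gitlab::ImportExport::Json::NdjsonReader consume one JSON record per line. A rough stdlib-only sketch of that read path, collapsing the decompress and read steps into a single streaming pass (method name and path are hypothetical):

    require 'json'
    require 'zlib'

    # Stream a gzipped NDJSON file and yield each decoded record.
    def each_ndjson_record(path)
      Zlib::GzipReader.open(path) do |gz|
        gz.each_line do |line|
          next if line.strip.empty?

          yield JSON.parse(line)
        end
      end
    end

    # Usage (hypothetical file):
    # each_ndjson_record('labels.ndjson.gz') { |record| p record }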
diff --git a/lib/bulk_imports/common/pipelines/uploads_pipeline.rb b/lib/bulk_imports/common/pipelines/uploads_pipeline.rb
index 2ac4e533c1d..d7b9d6920ea 100644
--- a/lib/bulk_imports/common/pipelines/uploads_pipeline.rb
+++ b/lib/bulk_imports/common/pipelines/uploads_pipeline.rb
@@ -15,7 +15,7 @@ module BulkImports
           decompression_service.execute
           extraction_service.execute

-          upload_file_paths = Dir.glob(File.join(tmp_dir, '**', '*'))
+          upload_file_paths = Dir.glob(File.join(tmpdir, '**', '*'))

           BulkImports::Pipeline::ExtractedData.new(data: upload_file_paths)
         end
@@ -37,7 +37,7 @@ module BulkImports
         end

         def after_run(_)
-          FileUtils.remove_entry(tmp_dir) if Dir.exist?(tmp_dir)
+          FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
         end

         private
@@ -46,17 +46,17 @@ module BulkImports
           BulkImports::FileDownloadService.new(
             configuration: context.configuration,
             relative_url: context.entity.relation_download_url_path(relation),
-            dir: tmp_dir,
+            tmpdir: tmpdir,
             filename: targz_filename
           )
         end

         def decompression_service
-          BulkImports::FileDecompressionService.new(dir: tmp_dir, filename: targz_filename)
+          BulkImports::FileDecompressionService.new(tmpdir: tmpdir, filename: targz_filename)
         end

         def extraction_service
-          BulkImports::ArchiveExtractionService.new(tmpdir: tmp_dir, filename: tar_filename)
+          BulkImports::ArchiveExtractionService.new(tmpdir: tmpdir, filename: tar_filename)
         end

         def relation
@@ -71,8 +71,8 @@ module BulkImports
           "#{tar_filename}.gz"
         end

-        def tmp_dir
-          @tmp_dir ||= Dir.mktmpdir('bulk_imports')
+        def tmpdir
+          @tmpdir ||= Dir.mktmpdir('bulk_imports')
         end

         def file_uploader
diff --git a/lib/bulk_imports/ndjson_pipeline.rb b/lib/bulk_imports/ndjson_pipeline.rb
index d5475a8b324..d85e51984df 100644
--- a/lib/bulk_imports/ndjson_pipeline.rb
+++ b/lib/bulk_imports/ndjson_pipeline.rb
@@ -68,7 +68,7 @@ module BulkImports
     end

     def after_run(_)
-      extractor.remove_tmp_dir if extractor.respond_to?(:remove_tmp_dir)
+      extractor.remove_tmpdir if extractor.respond_to?(:remove_tmpdir)
     end

     def relation_class(relation_key)
diff --git a/lib/bulk_imports/projects/pipelines/project_attributes_pipeline.rb b/lib/bulk_imports/projects/pipelines/project_attributes_pipeline.rb
index 4d742225ff7..2492a023cbe 100644
--- a/lib/bulk_imports/projects/pipelines/project_attributes_pipeline.rb
+++ b/lib/bulk_imports/projects/pipelines/project_attributes_pipeline.rb
@@ -8,15 +8,16 @@ module BulkImports

         transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer

-        def extract(context)
-          download_service(tmp_dir, context).execute
-          decompression_service(tmp_dir).execute
+        def extract(_context)
+          download_service.execute
+          decompression_service.execute
+
           project_attributes = json_decode(json_attributes)

           BulkImports::Pipeline::ExtractedData.new(data: project_attributes)
         end

-        def transform(_, data)
+        def transform(_context, data)
           subrelations = config.portable_relations_tree.keys.map(&:to_s)

           Gitlab::ImportExport::AttributeCleaner.clean(
@@ -26,42 +27,42 @@ module BulkImports
           ).except(*subrelations)
         end

-        def load(_, data)
+        def load(_context, data)
           portable.assign_attributes(data)
           portable.reconcile_shared_runners_setting!
           portable.drop_visibility_level!
           portable.save!
         end

-        def after_run(_)
-          FileUtils.remove_entry(tmp_dir)
+        def after_run(_context)
+          FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
         end

         def json_attributes
-          @json_attributes ||= File.read(File.join(tmp_dir, filename))
+          @json_attributes ||= File.read(File.join(tmpdir, filename))
         end

         private

-        def tmp_dir
-          @tmp_dir ||= Dir.mktmpdir
+        def tmpdir
+          @tmpdir ||= Dir.mktmpdir('bulk_imports')
         end

         def config
           @config ||= BulkImports::FileTransfer.config_for(portable)
         end

-        def download_service(tmp_dir, context)
+        def download_service
           @download_service ||= BulkImports::FileDownloadService.new(
             configuration: context.configuration,
-            relative_url: context.entity.relation_download_url_path(BulkImports::FileTransfer::BaseConfig::SELF_RELATION),
-            dir: tmp_dir,
+            relative_url: context.entity.relation_download_url_path(BulkImports::FileTransfer::BaseConfig::SELF_RELATION),
+            tmpdir: tmpdir,
             filename: compressed_filename
           )
         end

-        def decompression_service(tmp_dir)
-          @decompression_service ||= BulkImports::FileDecompressionService.new(dir: tmp_dir, filename: compressed_filename)
+        def decompression_service
+          @decompression_service ||= BulkImports::FileDecompressionService.new(tmpdir: tmpdir, filename: compressed_filename)
         end

         def compressed_filename