gitlab.com/gitlab-org/gitlab-foss.git

author    GitLab Bot <gitlab-bot@gitlab.com> 2021-05-27 21:10:52 +0300
committer GitLab Bot <gitlab-bot@gitlab.com> 2021-05-27 21:10:52 +0300
commit    479221aa79c2e18497589f0aef175a06fb5f5e29 (patch)
tree      08816013065674d37dba23fb0de010d257ae55be /lib
parent    f719944deedf392d98947cb1c499169696c8da70 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'lib')
-rw-r--r--  lib/bulk_imports/common/extractors/ndjson_extractor.rb | 68
-rw-r--r--  lib/bulk_imports/groups/graphql/get_labels_query.rb    | 53
-rw-r--r--  lib/bulk_imports/groups/pipelines/entity_finisher.rb   |  4
-rw-r--r--  lib/bulk_imports/groups/pipelines/labels_pipeline.rb   | 32
-rw-r--r--  lib/bulk_imports/ndjson_pipeline.rb                    | 63
-rw-r--r--  lib/bulk_imports/pipeline.rb                           | 21
-rw-r--r--  lib/bulk_imports/pipeline/context.rb                   |  8
-rw-r--r--  lib/bulk_imports/pipeline/extracted_data.rb            |  2
8 files changed, 190 insertions(+), 61 deletions(-)
diff --git a/lib/bulk_imports/common/extractors/ndjson_extractor.rb b/lib/bulk_imports/common/extractors/ndjson_extractor.rb
new file mode 100644
index 00000000000..27dfb0dcce5
--- /dev/null
+++ b/lib/bulk_imports/common/extractors/ndjson_extractor.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module BulkImports
+  module Common
+    module Extractors
+      class NdjsonExtractor
+        include Gitlab::ImportExport::CommandLineUtil
+        include Gitlab::Utils::StrongMemoize
+
+        EXPORT_DOWNLOAD_URL_PATH = "/%{resource}/%{full_path}/export_relations/download?relation=%{relation}"
+
+        def initialize(relation:)
+          @relation = relation
+          @tmp_dir = Dir.mktmpdir
+        end
+
+        def extract(context)
+          download_service(tmp_dir, context).execute
+          decompression_service(tmp_dir).execute
+          relations = ndjson_reader(tmp_dir).consume_relation('', relation)
+
+          BulkImports::Pipeline::ExtractedData.new(data: relations)
+        end
+
+        def remove_tmp_dir
+          FileUtils.remove_entry(tmp_dir)
+        end
+
+        private
+
+        attr_reader :relation, :tmp_dir
+
+        def filename
+          @filename ||= "#{relation}.ndjson.gz"
+        end
+
+        def download_service(tmp_dir, context)
+          @download_service ||= BulkImports::FileDownloadService.new(
+            configuration: context.configuration,
+            relative_url: relative_resource_url(context),
+            dir: tmp_dir,
+            filename: filename
+          )
+        end
+
+        def decompression_service(tmp_dir)
+          @decompression_service ||= BulkImports::FileDecompressionService.new(
+            dir: tmp_dir,
+            filename: filename
+          )
+        end
+
+        def ndjson_reader(tmp_dir)
+          @ndjson_reader ||= Gitlab::ImportExport::JSON::NdjsonReader.new(tmp_dir)
+        end
+
+        def relative_resource_url(context)
+          strong_memoize(:relative_resource_url) do
+            resource = context.portable.class.name.downcase.pluralize
+            encoded_full_path = context.entity.encoded_source_full_path
+
+            EXPORT_DOWNLOAD_URL_PATH % { resource: resource, full_path: encoded_full_path, relation: relation }
+          end
+        end
+      end
+    end
+  end
+end
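
For reference, a minimal usage sketch of the new extractor (not part of this diff); `context` is assumed to be a BulkImports::Pipeline::Context prepared by the pipeline runner:

# Minimal sketch: extract group labels through the relation export endpoint.
extractor = BulkImports::Common::Extractors::NdjsonExtractor.new(relation: 'labels')

extracted = extractor.extract(context) # downloads and decompresses labels.ndjson.gz
extracted.data.each do |relation_hash, relation_index|
  # each NDJSON line arrives as an [attributes_hash, line_index] tuple
end

extractor.remove_tmp_dir # clean up the temporary download directory
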
diff --git a/lib/bulk_imports/groups/graphql/get_labels_query.rb b/lib/bulk_imports/groups/graphql/get_labels_query.rb
deleted file mode 100644
index f957cf0be52..00000000000
--- a/lib/bulk_imports/groups/graphql/get_labels_query.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-module BulkImports
-  module Groups
-    module Graphql
-      module GetLabelsQuery
-        extend self
-
-        def to_s
-          <<-'GRAPHQL'
-          query ($full_path: ID!, $cursor: String, $per_page: Int) {
-            group(fullPath: $full_path) {
-              labels(first: $per_page, after: $cursor, onlyGroupLabels: true) {
-                page_info: pageInfo {
-                  next_page: endCursor
-                  has_next_page: hasNextPage
-                }
-                nodes {
-                  title
-                  description
-                  color
-                  created_at: createdAt
-                  updated_at: updatedAt
-                }
-              }
-            }
-          }
-          GRAPHQL
-        end
-
-        def variables(context)
-          {
-            full_path: context.entity.source_full_path,
-            cursor: context.tracker.next_page,
-            per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
-          }
-        end
-
-        def base_path
-          %w[data group labels]
-        end
-
-        def data_path
-          base_path << 'nodes'
-        end
-
-        def page_info_path
-          base_path << 'page_info'
-        end
-      end
-    end
-  end
-end
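
The deleted query above was the GraphQL source of group labels; after this commit the same rows come from the relation export endpoint whose path NdjsonExtractor builds. A sketch of that URL with illustrative values (CGI.escape stands in for entity.encoded_source_full_path):

require 'cgi'

resource  = 'groups'                            # portable.class.name.downcase.pluralize
full_path = CGI.escape('my-group/my-subgroup')  # illustrative group path

format(BulkImports::Common::Extractors::NdjsonExtractor::EXPORT_DOWNLOAD_URL_PATH,
       resource: resource, full_path: full_path, relation: 'labels')
# => "/groups/my-group%2Fmy-subgroup/export_relations/download?relation=labels"
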
diff --git a/lib/bulk_imports/groups/pipelines/entity_finisher.rb b/lib/bulk_imports/groups/pipelines/entity_finisher.rb
index 1d237bc0f7f..afe954a64d6 100644
--- a/lib/bulk_imports/groups/pipelines/entity_finisher.rb
+++ b/lib/bulk_imports/groups/pipelines/entity_finisher.rb
@@ -4,6 +4,10 @@ module BulkImports
   module Groups
     module Pipelines
       class EntityFinisher
+        def self.ndjson_pipeline?
+          false
+        end
+
         def initialize(context)
           @context = context
         end
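
EntityFinisher does not include the Pipeline DSL, so it answers the new ndjson_pipeline? predicate explicitly. A hypothetical caller sketch (the real scheduling code lives outside this diff):

pipeline_class = BulkImports::Groups::Pipelines::EntityFinisher

if pipeline_class.ndjson_pipeline?
  # e.g. wait for the source instance to finish exporting the relation first
else
  pipeline_class.new(context).run
end
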
diff --git a/lib/bulk_imports/groups/pipelines/labels_pipeline.rb b/lib/bulk_imports/groups/pipelines/labels_pipeline.rb
index 0dc4a968b84..806db68e5d1 100644
--- a/lib/bulk_imports/groups/pipelines/labels_pipeline.rb
+++ b/lib/bulk_imports/groups/pipelines/labels_pipeline.rb
@@ -4,15 +4,35 @@ module BulkImports
   module Groups
     module Pipelines
       class LabelsPipeline
-        include Pipeline
+        include NdjsonPipeline
 
-        extractor BulkImports::Common::Extractors::GraphqlExtractor,
-            query: BulkImports::Groups::Graphql::GetLabelsQuery
+        RELATION = 'labels'
 
-        transformer Common::Transformers::ProhibitedAttributesTransformer
+        extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: RELATION
 
-        def load(context, data)
-          Labels::CreateService.new(data).execute(group: context.group)
+        def transform(context, data)
+          relation_hash = data.first
+          relation_index = data.last
+          relation_definition = import_export_config.top_relation_tree(RELATION)
+
+          deep_transform_relation!(relation_hash, RELATION, relation_definition) do |key, hash|
+            Gitlab::ImportExport::Group::RelationFactory.create(
+              relation_index: relation_index,
+              relation_sym: key.to_sym,
+              relation_hash: hash,
+              importable: context.portable,
+              members_mapper: nil,
+              object_builder: object_builder,
+              user: context.current_user,
+              excluded_keys: import_export_config.relation_excluded_keys(key)
+            )
+          end
+        end
+
+        def load(_, label)
+          return unless label
+
+          label.save! unless label.persisted?
         end
       end
     end
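
A sketch of the per-row flow through the rewritten pipeline, assuming `pipeline` is an instantiated LabelsPipeline and the attributes are illustrative:

data = [{ 'title' => 'security', 'color' => '#FF0000' }, 0] # [attributes_hash, line_index]

label = pipeline.transform(context, data) # builds a Label via RelationFactory, typically unsaved
pipeline.load(context, label)             # persists it unless it is already persisted
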
diff --git a/lib/bulk_imports/ndjson_pipeline.rb b/lib/bulk_imports/ndjson_pipeline.rb
new file mode 100644
index 00000000000..4f5f94c30b8
--- /dev/null
+++ b/lib/bulk_imports/ndjson_pipeline.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module BulkImports
+  module NdjsonPipeline
+    extend ActiveSupport::Concern
+
+    include Pipeline
+
+    included do
+      ndjson_pipeline!
+
+      def deep_transform_relation!(relation_hash, relation_key, relation_definition, &block)
+        relation_key = relation_key_override(relation_key)
+
+        relation_definition.each do |sub_relation_key, sub_relation_definition|
+          sub_relation = relation_hash[sub_relation_key]
+
+          next unless sub_relation
+
+          current_item =
+            if sub_relation.is_a?(Array)
+              sub_relation
+                .map { |entry| deep_transform_relation!(entry, sub_relation_key, sub_relation_definition, &block) }
+                .tap { |entry| entry.compact! }
+                .presence
+            else
+              deep_transform_relation!(sub_relation, sub_relation_key, sub_relation_definition, &block)
+            end
+
+          if current_item
+            relation_hash[sub_relation_key] = current_item
+          else
+            relation_hash.delete(sub_relation_key)
+          end
+        end
+
+        yield(relation_key, relation_hash)
+      end
+
+      def after_run(_)
+        extractor.remove_tmp_dir if extractor.respond_to?(:remove_tmp_dir)
+      end
+
+      def relation_class(relation_key)
+        relation_key.to_s.classify.constantize
+      rescue NameError
+        relation_key.to_s.constantize
+      end
+
+      def relation_key_override(relation_key)
+        relation_key_overrides[relation_key.to_sym]&.to_s || relation_key
+      end
+
+      def relation_key_overrides
+        "Gitlab::ImportExport::#{portable.class}::RelationFactory::OVERRIDES".constantize
+      end
+
+      def object_builder
+        "Gitlab::ImportExport::#{portable.class}::ObjectBuilder".constantize
+      end
+    end
+  end
+end
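
A worked sketch of deep_transform_relation! with illustrative keys (not taken from a real relation tree): sub-relations are transformed depth-first, empty results are pruned, and the block finally receives the top-level key with any override applied.

relation_hash = {
  'title'  => 'Sprint 1',
  'events' => [{ 'action' => 'created' }]
}
relation_definition = { 'events' => {} } # shape returned by top_relation_tree

pipeline.deep_transform_relation!(relation_hash, 'milestones', relation_definition) do |key, hash|
  # invoked bottom-up: once per 'events' entry, then once for 'milestones' itself
  build_record(key, hash) # hypothetical factory call
end
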
diff --git a/lib/bulk_imports/pipeline.rb b/lib/bulk_imports/pipeline.rb
index df4f020d6b2..24aa7482974 100644
--- a/lib/bulk_imports/pipeline.rb
+++ b/lib/bulk_imports/pipeline.rb
@@ -8,8 +8,11 @@ module BulkImports
     include Runner
 
     NotAllowedError = Class.new(StandardError)
+    ExpiredError = Class.new(StandardError)
+    FailedError = Class.new(StandardError)
 
     CACHE_KEY_EXPIRATION = 2.hours
+    NDJSON_EXPORT_TIMEOUT = 30.minutes
 
     def initialize(context)
       @context = context
@@ -19,6 +22,14 @@ module BulkImports
       @tracker ||= context.tracker
     end
 
+    def portable
+      @portable ||= context.portable
+    end
+
+    def import_export_config
+      @import_export_config ||= context.import_export_config
+    end
+
     included do
       private
 
@@ -111,7 +122,7 @@ module BulkImports
         options = class_config[:options]
 
         if options
-          class_config[:klass].new(class_config[:options])
+          class_config[:klass].new(**class_config[:options])
         else
           class_config[:klass].new
         end
@@ -155,6 +166,14 @@
         class_attributes[:abort_on_failure]
       end
 
+      def ndjson_pipeline!
+        class_attributes[:ndjson_pipeline] = true
+      end
+
+      def ndjson_pipeline?
+        class_attributes[:ndjson_pipeline]
+      end
+
       private
 
       def add_attribute(sym, klass, options)
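
The change to class_config[:klass].new(**class_config[:options]) matters because extractors such as NdjsonExtractor now take keyword arguments. A standalone sketch of the Ruby 3 behaviour behind it (outside this codebase):

class Extractor
  def initialize(relation:)
    @relation = relation
  end
end

options = { relation: 'labels' }

Extractor.new(options)   # ArgumentError on Ruby 3.x: the hash stays positional
Extractor.new(**options) # works: the hash is expanded into keyword arguments
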
diff --git a/lib/bulk_imports/pipeline/context.rb b/lib/bulk_imports/pipeline/context.rb
index 3c69c729f36..d753f888671 100644
--- a/lib/bulk_imports/pipeline/context.rb
+++ b/lib/bulk_imports/pipeline/context.rb
@@ -16,6 +16,14 @@ module BulkImports
         @entity ||= tracker.entity
       end
 
+      def portable
+        @portable ||= entity.group || entity.project
+      end
+
+      def import_export_config
+        @import_export_config ||= ::BulkImports::FileTransfer.config_for(portable)
+      end
+
       def group
         @group ||= entity.group
       end
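
A small sketch of the new context helpers, assuming `context` was built for a group entity by the pipeline worker:

context.portable              # => the group (or project) the data is imported into
context.import_export_config  # => BulkImports::FileTransfer config for that portable,
                              #    the source of relation trees and excluded keys
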
diff --git a/lib/bulk_imports/pipeline/extracted_data.rb b/lib/bulk_imports/pipeline/extracted_data.rb
index c9e54b61dd3..0b36c068298 100644
--- a/lib/bulk_imports/pipeline/extracted_data.rb
+++ b/lib/bulk_imports/pipeline/extracted_data.rb
@@ -6,7 +6,7 @@ module BulkImports
       attr_reader :data
 
       def initialize(data: nil, page_info: {})
-        @data = Array.wrap(data)
+        @data = data.is_a?(Enumerator) ? data : Array.wrap(data)
         @page_info = page_info
       end
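
Why the Enumerator is passed through untouched: Array.wrap would materialise every NDJSON row at once, while an Enumerator lets the pipeline stream rows lazily. A stand-in sketch:

require 'json'

rows = Enumerator.new do |yielder|
  File.foreach('labels.ndjson').with_index do |line, index|
    yielder << [JSON.parse(line), index]
  end
end

extracted = BulkImports::Pipeline::ExtractedData.new(data: rows)
extracted.data.each do |attributes, index|
  # rows are parsed one at a time as they are iterated, not preloaded into an Array
end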