Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
Diffstat (limited to 'spec/lib/bulk_imports')
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb42
-rw-r--r--spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb53
-rw-r--r--spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb2
-rw-r--r--spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb2
-rw-r--r--spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb35
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb49
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb34
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb102
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb110
-rw-r--r--spec/lib/bulk_imports/ndjson_pipeline_spec.rb186
-rw-r--r--spec/lib/bulk_imports/pipeline/context_spec.rb23
-rw-r--r--spec/lib/bulk_imports/pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/stage_spec.rb3
13 files changed, 454 insertions(+), 189 deletions(-)
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 213fa23675e..ac42f12a3d4 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Clients::Http do
+RSpec.describe BulkImports::Clients::HTTP do
include ImportSpecHelper
let(:uri) { 'http://gitlab.example' }
@@ -48,6 +48,7 @@ RSpec.describe BulkImports::Clients::Http do
[
'http://gitlab.example:80/api/v4/resource',
hash_including(
+ follow_redirects: false,
query: {
page: described_class::DEFAULT_PAGE,
per_page: described_class::DEFAULT_PER_PAGE
@@ -118,6 +119,7 @@ RSpec.describe BulkImports::Clients::Http do
'http://gitlab.example:80/api/v4/resource',
hash_including(
body: {},
+ follow_redirects: false,
headers: {
'Content-Type' => 'application/json',
'Authorization' => "Bearer #{token}"
@@ -127,4 +129,42 @@ RSpec.describe BulkImports::Clients::Http do
end
end
end
+
+ describe '#head' do
+ let(:method) { :head }
+
+ include_examples 'performs network request' do
+ let(:expected_args) do
+ [
+ 'http://gitlab.example:80/api/v4/resource',
+ hash_including(
+ follow_redirects: false,
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'Authorization' => "Bearer #{token}"
+ }
+ )
+ ]
+ end
+ end
+ end
+
+ describe '#stream' do
+ it 'performs network request with stream_body option' do
+ expected_args = [
+ 'http://gitlab.example:80/api/v4/resource',
+ hash_including(
+ stream_body: true,
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'Authorization' => "Bearer #{token}"
+ }
+ )
+ ]
+
+ expect(Gitlab::HTTP).to receive(:get).with(*expected_args).and_return(response_double)
+
+ subject.stream(resource)
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb
new file mode 100644
index 00000000000..bd306233de8
--- /dev/null
+++ b/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do
+ let_it_be(:tmpdir) { Dir.mktmpdir }
+ let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/labels.ndjson.gz' }
+ let_it_be(:import) { create(:bulk_import) }
+ let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject { described_class.new(relation: 'labels') }
+
+ before do
+ allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
+
+ subject.instance_variable_set(:@tmp_dir, tmpdir)
+ end
+
+ after(:all) do
+ FileUtils.remove_entry(tmpdir) if File.directory?(tmpdir)
+ end
+
+ describe '#extract' do
+ before do
+ FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz'))
+
+ allow_next_instance_of(BulkImports::FileDownloadService) do |service|
+ allow(service).to receive(:execute)
+ end
+ end
+
+ it 'returns ExtractedData' do
+ extracted_data = subject.extract(context)
+ label = extracted_data.data.first.first
+
+ expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
+ expect(label['title']).to include('Label')
+ expect(label['description']).to include('Label')
+ expect(label['type']).to eq('GroupLabel')
+ end
+ end
+
+ describe '#remove_tmp_dir' do
+ it 'removes tmp dir' do
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).once
+
+ subject.remove_tmp_dir
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb
index 721dacbe3f4..5ee5cdbe413 100644
--- a/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb
+++ b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe BulkImports::Common::Extractors::RestExtractor do
- let(:http_client) { instance_double(BulkImports::Clients::Http) }
+ let(:http_client) { instance_double(BulkImports::Clients::HTTP) }
let(:options) { { query: double(to_h: { resource: nil, query: nil }) } }
let(:response) { double(parsed_response: { 'data' => { 'foo' => 'bar' } }, headers: { 'x-next-page' => '2' }) }
diff --git a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
index ac8786440e9..f7485b188ce 100644
--- a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
+++ b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe BulkImports::Groups::Extractors::SubgroupsExtractor do
response = [{ 'test' => 'group' }]
context = BulkImports::Pipeline::Context.new(tracker)
- allow_next_instance_of(BulkImports::Clients::Http) do |client|
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
allow(client).to receive(:each_page).and_return(response)
end
diff --git a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
deleted file mode 100644
index 61db644a372..00000000000
--- a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do
- it 'has a valid query' do
- tracker = create(:bulk_import_tracker)
- context = BulkImports::Pipeline::Context.new(tracker)
-
- query = GraphQL::Query.new(
- GitlabSchema,
- described_class.to_s,
- variables: described_class.variables(context)
- )
- result = GitlabSchema.static_validator.validate(query)
-
- expect(result[:errors]).to be_empty
- end
-
- describe '#data_path' do
- it 'returns data path' do
- expected = %w[data group labels nodes]
-
- expect(described_class.data_path).to eq(expected)
- end
- end
-
- describe '#page_info_path' do
- it 'returns pagination information path' do
- expected = %w[data group labels page_info]
-
- expect(described_class.page_info_path).to eq(expected)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb
new file mode 100644
index 00000000000..8b2f03ca15f
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/boards_pipeline_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::BoardsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/boards.ndjson.gz' }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ group: group,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:tmpdir) { Dir.mktmpdir }
+
+ before do
+ FileUtils.copy_file(filepath, File.join(tmpdir, 'boards.ndjson.gz'))
+ group.add_owner(user)
+ end
+
+ subject { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports group boards into destination group and removes tmpdir' do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ allow_next_instance_of(BulkImports::FileDownloadService) do |service|
+ allow(service).to receive(:execute)
+ end
+
+ expect { subject.run }.to change(Board, :count).by(1)
+
+ lists = group.boards.find_by(name: 'first board').lists
+
+ expect(lists.count).to eq(3)
+ expect(lists.first.label.title).to eq('TSL')
+ expect(lists.second.label.title).to eq('Sosync')
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
index 8276349c5f4..b97aeb435b9 100644
--- a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
@@ -25,13 +25,33 @@ RSpec.describe BulkImports::Groups::Pipelines::EntityFinisher do
.to change(entity, :status_name).to(:finished)
end
- it 'does nothing when the entity is already finished' do
- entity = create(:bulk_import_entity, :finished)
- pipeline_tracker = create(:bulk_import_tracker, entity: entity)
- context = BulkImports::Pipeline::Context.new(pipeline_tracker)
- subject = described_class.new(context)
+ context 'when entity is in a final finished or failed state' do
+ shared_examples 'performs no state update' do |entity_state|
+ it 'does nothing' do
+ entity = create(:bulk_import_entity, entity_state)
+ pipeline_tracker = create(:bulk_import_tracker, entity: entity)
+ context = BulkImports::Pipeline::Context.new(pipeline_tracker)
+ subject = described_class.new(context)
- expect { subject.run }
- .not_to change(entity, :status_name)
+ expect { subject.run }
+ .not_to change(entity, :status_name)
+ end
+ end
+
+ include_examples 'performs no state update', :finished
+ include_examples 'performs no state update', :failed
+ end
+
+ context 'when all entity trackers failed' do
+ it 'marks entity as failed' do
+ entity = create(:bulk_import_entity, :started)
+ create(:bulk_import_tracker, :failed, entity: entity)
+ pipeline_tracker = create(:bulk_import_tracker, entity: entity, relation: described_class)
+ context = BulkImports::Pipeline::Context.new(pipeline_tracker)
+
+ described_class.new(context).run
+
+ expect(entity.reload.failed?).to eq(true)
+ end
end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
index 8af646d1101..6344dae0fb7 100644
--- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
@@ -5,98 +5,74 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
-
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/labels.ndjson.gz' }
let_it_be(:entity) do
create(
:bulk_import_entity,
+ group: group,
+ bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
- destination_namespace: group.full_path,
- group: group
+ destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:tmpdir) { Dir.mktmpdir }
+
+ before do
+ FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz'))
+ group.add_owner(user)
+ end
+
subject { described_class.new(context) }
describe '#run' do
- it 'imports a group labels' do
- first_page = extracted_data(title: 'label1', has_next_page: true)
- last_page = extracted_data(title: 'label2')
-
- allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
- allow(extractor)
- .to receive(:extract)
- .and_return(first_page, last_page)
+ it 'imports group labels into destination group and removes tmpdir' do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ allow_next_instance_of(BulkImports::FileDownloadService) do |service|
+ allow(service).to receive(:execute)
end
- expect { subject.run }.to change(Label, :count).by(2)
+ expect { subject.run }.to change(::GroupLabel, :count).by(1)
- label = group.labels.order(:created_at).last
+ label = group.labels.first
- expect(label.title).to eq('label2')
- expect(label.description).to eq('desc')
- expect(label.color).to eq('#428BCA')
- expect(label.created_at).to eq(timestamp)
- expect(label.updated_at).to eq(timestamp)
+ expect(label.title).to eq('Label 1')
+ expect(label.description).to eq('Label 1')
+ expect(label.color).to eq('#6699cc')
+ expect(File.directory?(tmpdir)).to eq(false)
end
end
describe '#load' do
- it 'creates the label' do
- data = label_data('label')
+ context 'when label is not persisted' do
+ it 'saves the label' do
+ label = build(:group_label, group: group)
- expect { subject.load(context, data) }.to change(Label, :count).by(1)
+ expect(label).to receive(:save!)
- label = group.labels.first
-
- data.each do |key, value|
- expect(label[key]).to eq(value)
+ subject.load(context, label)
end
end
- end
- describe 'pipeline parts' do
- it { expect(described_class).to include_module(BulkImports::Pipeline) }
- it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
-
- it 'has extractors' do
- expect(described_class.get_extractor)
- .to eq(
- klass: BulkImports::Common::Extractors::GraphqlExtractor,
- options: {
- query: BulkImports::Groups::Graphql::GetLabelsQuery
- }
- )
- end
+ context 'when label is persisted' do
+ it 'does not save label' do
+ label = create(:group_label, group: group)
- it 'has transformers' do
- expect(described_class.transformers)
- .to contain_exactly(
- { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
- )
- end
- end
-
- def label_data(title)
- {
- 'title' => title,
- 'description' => 'desc',
- 'color' => '#428BCA',
- 'created_at' => timestamp.to_s,
- 'updated_at' => timestamp.to_s
- }
- end
+ expect(label).not_to receive(:save!)
- def extracted_data(title:, has_next_page: false)
- page_info = {
- 'has_next_page' => has_next_page,
- 'next_page' => has_next_page ? 'cursor' : nil
- }
+ subject.load(context, label)
+ end
+ end
- BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info)
+ context 'when label is missing' do
+ it 'returns' do
+ expect(subject.load(context, nil)).to be_nil
+ end
+ end
end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
index e5cf75c566b..a8354e62459 100644
--- a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
@@ -5,119 +5,69 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-
+ let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/milestones.ndjson.gz' }
let_it_be(:entity) do
create(
:bulk_import_entity,
+ group: group,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
- destination_namespace: group.full_path,
- group: group
+ destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- subject { described_class.new(context) }
+ let(:tmpdir) { Dir.mktmpdir }
before do
+ FileUtils.copy_file(filepath, File.join(tmpdir, 'milestones.ndjson.gz'))
group.add_owner(user)
end
- describe '#run' do
- it 'imports group milestones' do
- first_page = extracted_data(title: 'milestone1', iid: 1, has_next_page: true)
- last_page = extracted_data(title: 'milestone2', iid: 2)
+ subject { described_class.new(context) }
- allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
- allow(extractor)
- .to receive(:extract)
- .and_return(first_page, last_page)
+ describe '#run' do
+ it 'imports group milestones into destination group and removes tmpdir' do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ allow_next_instance_of(BulkImports::FileDownloadService) do |service|
+ allow(service).to receive(:execute)
end
- expect { subject.run }.to change(Milestone, :count).by(2)
-
- expect(group.milestones.pluck(:title)).to contain_exactly('milestone1', 'milestone2')
-
- milestone = group.milestones.last
-
- expect(milestone.description).to eq('desc')
- expect(milestone.state).to eq('closed')
- expect(milestone.start_date.to_s).to eq('2020-10-21')
- expect(milestone.due_date.to_s).to eq('2020-10-22')
- expect(milestone.created_at).to eq(timestamp)
- expect(milestone.updated_at).to eq(timestamp)
+ expect { subject.run }.to change(Milestone, :count).by(5)
+ expect(group.milestones.pluck(:title)).to contain_exactly('v4.0', 'v3.0', 'v2.0', 'v1.0', 'v0.0')
+ expect(File.directory?(tmpdir)).to eq(false)
end
end
describe '#load' do
- it 'creates the milestone' do
- data = milestone_data('milestone')
-
- expect { subject.load(context, data) }.to change(Milestone, :count).by(1)
- end
-
- context 'when user is not authorized to create the milestone' do
- before do
- allow(user).to receive(:can?).with(:admin_milestone, group).and_return(false)
- end
+ context 'when milestone is not persisted' do
+ it 'saves the milestone' do
+ milestone = build(:milestone, group: group)
- it 'raises NotAllowedError' do
- data = extracted_data(title: 'milestone')
+ expect(milestone).to receive(:save!)
- expect { subject.load(context, data) }.to raise_error(::BulkImports::Pipeline::NotAllowedError)
+ subject.load(context, milestone)
end
end
- end
- describe 'pipeline parts' do
- it { expect(described_class).to include_module(BulkImports::Pipeline) }
- it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
+ context 'when milestone is persisted' do
+ it 'does not save milestone' do
+ milestone = create(:milestone, group: group)
- it 'has extractors' do
- expect(described_class.get_extractor)
- .to eq(
- klass: BulkImports::Common::Extractors::GraphqlExtractor,
- options: {
- query: BulkImports::Groups::Graphql::GetMilestonesQuery
- }
- )
- end
+ expect(milestone).not_to receive(:save!)
- it 'has transformers' do
- expect(described_class.transformers)
- .to contain_exactly(
- { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
- )
+ subject.load(context, milestone)
+ end
end
- end
- def milestone_data(title, iid: 1)
- {
- 'title' => title,
- 'description' => 'desc',
- 'iid' => iid,
- 'state' => 'closed',
- 'start_date' => '2020-10-21',
- 'due_date' => '2020-10-22',
- 'created_at' => timestamp.to_s,
- 'updated_at' => timestamp.to_s
- }
- end
-
- def extracted_data(title:, iid: 1, has_next_page: false)
- page_info = {
- 'has_next_page' => has_next_page,
- 'next_page' => has_next_page ? 'cursor' : nil
- }
-
- BulkImports::Pipeline::ExtractedData.new(
- data: milestone_data(title, iid: iid),
- page_info: page_info
- )
+ context 'when milestone is missing' do
+ it 'returns' do
+ expect(subject.load(context, nil)).to be_nil
+ end
+ end
end
end
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
new file mode 100644
index 00000000000..a5d1a5f7fbb
--- /dev/null
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -0,0 +1,186 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::NdjsonPipeline do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:klass) do
+ Class.new do
+ include BulkImports::NdjsonPipeline
+
+ relation_name 'test'
+
+ attr_reader :portable, :current_user
+
+ def initialize(portable, user)
+ @portable = portable
+ @current_user = user
+ end
+ end
+ end
+
+ before do
+ stub_const('NdjsonPipelineClass', klass)
+ end
+
+ subject { NdjsonPipelineClass.new(group, user) }
+
+ it 'marks pipeline as ndjson' do
+ expect(NdjsonPipelineClass.ndjson_pipeline?).to eq(true)
+ end
+
+ describe '#deep_transform_relation!' do
+ it 'transforms relation hash' do
+ transformed = subject.deep_transform_relation!({}, 'test', {}) do |key, hash|
+ hash.merge(relation_key: key)
+ end
+
+ expect(transformed[:relation_key]).to eq('test')
+ end
+
+ context 'when subrelations is an array' do
+ it 'transforms each element of the array' do
+ relation_hash = {
+ 'key' => 'value',
+ 'labels' => [
+ { 'title' => 'label 1' },
+ { 'title' => 'label 2' },
+ { 'title' => 'label 3' }
+ ]
+ }
+ relation_definition = { 'labels' => {} }
+
+ transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash|
+ hash.merge(relation_key: key)
+ end
+
+ transformed['labels'].each do |label|
+ expect(label[:relation_key]).to eq('group_labels')
+ end
+ end
+ end
+
+ context 'when subrelation is a hash' do
+ it 'transforms subrelation hash' do
+ relation_hash = {
+ 'key' => 'value',
+ 'label' => { 'title' => 'label' }
+ }
+ relation_definition = { 'label' => {} }
+
+ transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash|
+ hash.merge(relation_key: key)
+ end
+
+ expect(transformed['label'][:relation_key]).to eq('group_label')
+ end
+ end
+
+ context 'when subrelation is nil' do
+ it 'removes subrelation' do
+ relation_hash = {
+ 'key' => 'value',
+ 'label' => { 'title' => 'label' }
+ }
+ relation_definition = { 'label' => {} }
+
+ transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash|
+ if key == 'group_label'
+ nil
+ else
+ hash
+ end
+ end
+
+ expect(transformed['label']).to be_nil
+ end
+ end
+ end
+
+ describe '#transform' do
+ it 'calls relation factory' do
+ hash = { key: :value }
+ data = [hash, 1]
+ user = double
+ config = double(relation_excluded_keys: nil, top_relation_tree: [])
+ context = double(portable: group, current_user: user, import_export_config: config)
+ allow(subject).to receive(:import_export_config).and_return(config)
+
+ expect(Gitlab::ImportExport::Group::RelationFactory)
+ .to receive(:create)
+ .with(
+ relation_index: 1,
+ relation_sym: :test,
+ relation_hash: hash,
+ importable: group,
+ members_mapper: instance_of(Gitlab::ImportExport::MembersMapper),
+ object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
+ user: user,
+ excluded_keys: nil
+ )
+
+ subject.transform(context, data)
+ end
+ end
+
+ describe '#load' do
+ context 'when object is not persisted' do
+ it 'saves the object' do
+ object = double(persisted?: false)
+
+ expect(object).to receive(:save!)
+
+ subject.load(nil, object)
+ end
+ end
+
+ context 'when object is persisted' do
+ it 'does not save the object' do
+ object = double(persisted?: true)
+
+ expect(object).not_to receive(:save!)
+
+ subject.load(nil, object)
+ end
+ end
+
+ context 'when object is missing' do
+ it 'returns' do
+ expect(subject.load(nil, nil)).to be_nil
+ end
+ end
+ end
+
+ describe '#relation_class' do
+ context 'when relation name is pluralized' do
+ it 'returns constantized class' do
+ expect(subject.relation_class('MergeRequest::Metrics')).to eq(MergeRequest::Metrics)
+ end
+ end
+
+ context 'when relation name is singularized' do
+ it 'returns constantized class' do
+ expect(subject.relation_class('Badge')).to eq(Badge)
+ end
+ end
+ end
+
+ describe '#relation_key_override' do
+ context 'when portable is group' do
+ it 'returns group relation name override' do
+ expect(subject.relation_key_override('labels')).to eq('group_labels')
+ end
+ end
+
+ context 'when portable is project' do
+ subject { NdjsonPipelineClass.new(project, user) }
+
+ it 'returns group relation name override' do
+ expect(subject.relation_key_override('labels')).to eq('project_labels')
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/pipeline/context_spec.rb b/spec/lib/bulk_imports/pipeline/context_spec.rb
index 5b7711ad5d7..83d6f494d53 100644
--- a/spec/lib/bulk_imports/pipeline/context_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/context_spec.rb
@@ -6,6 +6,9 @@ RSpec.describe BulkImports::Pipeline::Context do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:project_tracker) { create(:bulk_import_tracker, entity: project_entity) }
let_it_be(:entity) do
create(
@@ -51,4 +54,24 @@ RSpec.describe BulkImports::Pipeline::Context do
describe '#extra' do
it { expect(subject.extra).to eq(extra: :data) }
end
+
+ describe '#portable' do
+ it { expect(subject.portable).to eq(group) }
+
+ context 'when portable is project' do
+ subject { described_class.new(project_tracker) }
+
+ it { expect(subject.portable).to eq(project) }
+ end
+ end
+
+ describe '#import_export_config' do
+ it { expect(subject.import_export_config).to be_instance_of(BulkImports::FileTransfer::GroupConfig) }
+
+ context 'when portable is project' do
+ subject { described_class.new(project_tracker) }
+
+ it { expect(subject.import_export_config).to be_instance_of(BulkImports::FileTransfer::ProjectConfig) }
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/pipeline_spec.rb b/spec/lib/bulk_imports/pipeline_spec.rb
index dda2e41f06c..48c265d6118 100644
--- a/spec/lib/bulk_imports/pipeline_spec.rb
+++ b/spec/lib/bulk_imports/pipeline_spec.rb
@@ -63,6 +63,7 @@ RSpec.describe BulkImports::Pipeline do
BulkImports::MyPipeline.transformer(klass, options)
BulkImports::MyPipeline.loader(klass, options)
BulkImports::MyPipeline.abort_on_failure!
+ BulkImports::MyPipeline.ndjson_pipeline!
expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: klass, options: options })
@@ -74,6 +75,7 @@ RSpec.describe BulkImports::Pipeline do
expect(BulkImports::MyPipeline.get_loader).to eq({ klass: klass, options: options })
expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true)
+ expect(BulkImports::MyPipeline.ndjson_pipeline?).to eq(true)
end
end
end
diff --git a/spec/lib/bulk_imports/stage_spec.rb b/spec/lib/bulk_imports/stage_spec.rb
index 713cd3f22ab..d082faa90bc 100644
--- a/spec/lib/bulk_imports/stage_spec.rb
+++ b/spec/lib/bulk_imports/stage_spec.rb
@@ -10,7 +10,8 @@ RSpec.describe BulkImports::Stage do
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Groups::Pipelines::LabelsPipeline],
[1, BulkImports::Groups::Pipelines::MilestonesPipeline],
- [1, BulkImports::Groups::Pipelines::BadgesPipeline]
+ [1, BulkImports::Groups::Pipelines::BadgesPipeline],
+ [2, BulkImports::Groups::Pipelines::BoardsPipeline]
]
end