Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-04-21 02:50:22 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-04-21 02:50:22 +0300
commit9dc93a4519d9d5d7be48ff274127136236a3adb3 (patch)
tree70467ae3692a0e35e5ea56bcb803eb512a10bedb /spec/lib/bulk_imports/groups/pipelines
parent4b0f34b6d759d6299322b3a54453e930c6121ff0 (diff)
Add latest changes from gitlab-org/gitlab@13-11-stable-eev13.11.0-rc43
Diffstat (limited to 'spec/lib/bulk_imports/groups/pipelines')
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb116
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb37
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb14
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb87
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb14
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb90
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb30
7 files changed, 238 insertions, 150 deletions
diff --git a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
new file mode 100644
index 00000000000..9fa35c4707d
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::BadgesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path,
+ group: group
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports a group badge' do
+ first_page = extracted_data(has_next_page: true)
+ last_page = extracted_data(name: 'badge2')
+
+ allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor|
+ allow(extractor)
+ .to receive(:extract)
+ .and_return(first_page, last_page)
+ end
+
+ expect { subject.run }.to change(Badge, :count).by(2)
+
+ badge = group.badges.last
+
+ expect(badge.name).to eq('badge2')
+ expect(badge.link_url).to eq(badge_data['link_url'])
+ expect(badge.image_url).to eq(badge_data['image_url'])
+ end
+
+ describe '#load' do
+ it 'creates a badge' do
+ expect { subject.load(context, badge_data) }.to change(Badge, :count).by(1)
+
+ badge = group.badges.first
+
+ badge_data.each do |key, value|
+ expect(badge[key]).to eq(value)
+ end
+ end
+
+ it 'does nothing when the data is blank' do
+ expect { subject.load(context, nil) }.not_to change(Badge, :count)
+ end
+ end
+
+ describe '#transform' do
+ it 'return transformed badge hash' do
+ badge = subject.transform(context, badge_data)
+
+ expect(badge[:name]).to eq('badge')
+ expect(badge[:link_url]).to eq(badge_data['link_url'])
+ expect(badge[:image_url]).to eq(badge_data['image_url'])
+ expect(badge.keys).to contain_exactly(:name, :link_url, :image_url)
+ end
+
+ context 'when data is blank' do
+ it 'does nothing when the data is blank' do
+ expect(subject.transform(context, nil)).to be_nil
+ end
+ end
+ end
+
+ describe 'pipeline parts' do
+ it { expect(described_class).to include_module(BulkImports::Pipeline) }
+ it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
+
+ it 'has extractors' do
+ expect(described_class.get_extractor)
+ .to eq(
+ klass: BulkImports::Common::Extractors::RestExtractor,
+ options: {
+ query: BulkImports::Groups::Rest::GetBadgesQuery
+ }
+ )
+ end
+
+ it 'has transformers' do
+ expect(described_class.transformers)
+ .to contain_exactly(
+ { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
+ )
+ end
+ end
+
+ def badge_data(name = 'badge')
+ {
+ 'name' => name,
+ 'link_url' => 'https://gitlab.example.com',
+ 'image_url' => 'https://gitlab.example.com/image.png'
+ }
+ end
+
+ def extracted_data(name: 'badge', has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? '2' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name)], page_info: page_info)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
new file mode 100644
index 00000000000..8276349c5f4
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::EntityFinisher do
+ it 'updates the entity status to finished' do
+ entity = create(:bulk_import_entity, :started)
+ pipeline_tracker = create(:bulk_import_tracker, entity: entity)
+ context = BulkImports::Pipeline::Context.new(pipeline_tracker)
+ subject = described_class.new(context)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ bulk_import_id: entity.bulk_import.id,
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: entity.source_type,
+ pipeline_class: described_class.name,
+ message: 'Entity finished'
+ )
+ end
+
+ expect { subject.run }
+ .to change(entity, :status_name).to(:finished)
+ end
+
+ it 'does nothing when the entity is already finished' do
+ entity = create(:bulk_import_entity, :finished)
+ pipeline_tracker = create(:bulk_import_tracker, entity: entity)
+ context = BulkImports::Pipeline::Context.new(pipeline_tracker)
+ subject = described_class.new(context)
+
+ expect { subject.run }
+ .not_to change(entity, :status_name)
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
index 61950cdd9b0..39e782dc093 100644
--- a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
@@ -4,10 +4,11 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
describe '#run' do
- let(:user) { create(:user) }
- let(:parent) { create(:group) }
- let(:bulk_import) { create(:bulk_import, user: user) }
- let(:entity) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
create(
:bulk_import_entity,
bulk_import: bulk_import,
@@ -17,7 +18,8 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:group_data) do
{
@@ -37,7 +39,7 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
- allow(extractor).to receive(:extract).and_return([group_data])
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: group_data))
end
parent.add_owner(user)
diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
index 3327a30f1d5..8af646d1101 100644
--- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:cursor) { 'cursor' }
- let(:timestamp) { Time.new(2020, 01, 01).utc }
- let(:entity) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
+
+ let_it_be(:entity) do
create(
:bulk_import_entity,
source_full_path: 'source/full/path',
@@ -17,33 +17,15 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
- def label_data(title)
- {
- 'title' => title,
- 'description' => 'desc',
- 'color' => '#428BCA',
- 'created_at' => timestamp.to_s,
- 'updated_at' => timestamp.to_s
- }
- end
-
- def extractor_data(title:, has_next_page:, cursor: nil)
- page_info = {
- 'end_cursor' => cursor,
- 'has_next_page' => has_next_page
- }
-
- BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info)
- end
-
describe '#run' do
it 'imports a group labels' do
- first_page = extractor_data(title: 'label1', has_next_page: true, cursor: cursor)
- last_page = extractor_data(title: 'label2', has_next_page: false)
+ first_page = extracted_data(title: 'label1', has_next_page: true)
+ last_page = extracted_data(title: 'label2')
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
@@ -63,38 +45,6 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
end
end
- describe '#after_run' do
- context 'when extracted data has next page' do
- it 'updates tracker information and runs pipeline again' do
- data = extractor_data(title: 'label', has_next_page: true, cursor: cursor)
-
- expect(subject).to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :labels)
-
- expect(tracker.has_next_page).to eq(true)
- expect(tracker.next_page).to eq(cursor)
- end
- end
-
- context 'when extracted data has no next page' do
- it 'updates tracker information and does not run pipeline' do
- data = extractor_data(title: 'label', has_next_page: false)
-
- expect(subject).not_to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :labels)
-
- expect(tracker.has_next_page).to eq(false)
- expect(tracker.next_page).to be_nil
- end
- end
- end
-
describe '#load' do
it 'creates the label' do
data = label_data('label')
@@ -130,4 +80,23 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
)
end
end
+
+ def label_data(title)
+ {
+ 'title' => title,
+ 'description' => 'desc',
+ 'color' => '#428BCA',
+ 'created_at' => timestamp.to_s,
+ 'updated_at' => timestamp.to_s
+ }
+ end
+
+ def extracted_data(title:, has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info)
+ end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
index 74d3e09d263..d8a667ec92a 100644
--- a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
@@ -8,17 +8,17 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:cursor) { 'cursor' }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
describe '#run' do
it 'maps existing users to the imported group' do
- first_page = member_data(email: member_user1.email, has_next_page: true, cursor: cursor)
- last_page = member_data(email: member_user2.email, has_next_page: false)
+ first_page = extracted_data(email: member_user1.email, has_next_page: true)
+ last_page = extracted_data(email: member_user2.email)
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
@@ -88,7 +88,7 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
end
end
- def member_data(email:, has_next_page:, cursor: nil)
+ def extracted_data(email:, has_next_page: false)
data = {
'created_at' => '2020-01-01T00:00:00Z',
'updated_at' => '2020-01-01T00:00:00Z',
@@ -102,8 +102,8 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
}
page_info = {
- 'end_cursor' => cursor,
- 'has_next_page' => has_next_page
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
}
BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
diff --git a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
index f0c34c65257..e5cf75c566b 100644
--- a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
@@ -5,11 +5,10 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:cursor) { 'cursor' }
let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let(:entity) do
+ let_it_be(:entity) do
create(
:bulk_import_entity,
bulk_import: bulk_import,
@@ -20,39 +19,19 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
- def milestone_data(title)
- {
- 'title' => title,
- 'description' => 'desc',
- 'state' => 'closed',
- 'start_date' => '2020-10-21',
- 'due_date' => '2020-10-22',
- 'created_at' => timestamp.to_s,
- 'updated_at' => timestamp.to_s
- }
- end
-
- def extracted_data(title:, has_next_page:, cursor: nil)
- page_info = {
- 'end_cursor' => cursor,
- 'has_next_page' => has_next_page
- }
-
- BulkImports::Pipeline::ExtractedData.new(data: [milestone_data(title)], page_info: page_info)
- end
-
before do
group.add_owner(user)
end
describe '#run' do
it 'imports group milestones' do
- first_page = extracted_data(title: 'milestone1', has_next_page: true, cursor: cursor)
- last_page = extracted_data(title: 'milestone2', has_next_page: false)
+ first_page = extracted_data(title: 'milestone1', iid: 1, has_next_page: true)
+ last_page = extracted_data(title: 'milestone2', iid: 2)
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
@@ -75,38 +54,6 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
end
end
- describe '#after_run' do
- context 'when extracted data has next page' do
- it 'updates tracker information and runs pipeline again' do
- data = extracted_data(title: 'milestone', has_next_page: true, cursor: cursor)
-
- expect(subject).to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :milestones)
-
- expect(tracker.has_next_page).to eq(true)
- expect(tracker.next_page).to eq(cursor)
- end
- end
-
- context 'when extracted data has no next page' do
- it 'updates tracker information and does not run pipeline' do
- data = extracted_data(title: 'milestone', has_next_page: false)
-
- expect(subject).not_to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :milestones)
-
- expect(tracker.has_next_page).to eq(false)
- expect(tracker.next_page).to be_nil
- end
- end
- end
-
describe '#load' do
it 'creates the milestone' do
data = milestone_data('milestone')
@@ -120,7 +67,7 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
end
it 'raises NotAllowedError' do
- data = extracted_data(title: 'milestone', has_next_page: false)
+ data = extracted_data(title: 'milestone')
expect { subject.load(context, data) }.to raise_error(::BulkImports::Pipeline::NotAllowedError)
end
@@ -148,4 +95,29 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
)
end
end
+
+ def milestone_data(title, iid: 1)
+ {
+ 'title' => title,
+ 'description' => 'desc',
+ 'iid' => iid,
+ 'state' => 'closed',
+ 'start_date' => '2020-10-21',
+ 'due_date' => '2020-10-22',
+ 'created_at' => timestamp.to_s,
+ 'updated_at' => timestamp.to_s
+ }
+ end
+
+ def extracted_data(title:, iid: 1, has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(
+ data: milestone_data(title, iid: iid),
+ page_info: page_info
+ )
+ end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
index 2a99646bb4a..e4a41428dd2 100644
--- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
@@ -6,31 +6,23 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, path: 'group') }
let_it_be(:parent) { create(:group, name: 'imported-group', path: 'imported-group') }
- let(:context) { BulkImports::Pipeline::Context.new(parent_entity) }
+ let_it_be(:parent_entity) { create(:bulk_import_entity, destination_namespace: parent.full_path, group: parent) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: parent_entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
- describe '#run' do
- let!(:parent_entity) do
- create(
- :bulk_import_entity,
- destination_namespace: parent.full_path,
- group: parent
- )
- end
-
- let(:subgroup_data) do
- [
- {
- "name" => "subgroup",
- "full_path" => "parent/subgroup"
- }
- ]
- end
+ let(:extracted_data) do
+ BulkImports::Pipeline::ExtractedData.new(data: {
+ 'name' => 'subgroup',
+ 'full_path' => 'parent/subgroup'
+ })
+ end
+ describe '#run' do
before do
allow_next_instance_of(BulkImports::Groups::Extractors::SubgroupsExtractor) do |extractor|
- allow(extractor).to receive(:extract).and_return(subgroup_data)
+ allow(extractor).to receive(:extract).and_return(extracted_data)
end
parent.add_owner(user)