gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com> 2021-12-20 16:37:47 +0300
committer GitLab Bot <gitlab-bot@gitlab.com> 2021-12-20 16:37:47 +0300
commit    aee0a117a889461ce8ced6fcf73207fe017f1d99 (patch)
tree      891d9ef189227a8445d83f35c1b0fc99573f4380 /spec/lib/bulk_imports
parent    8d46af3258650d305f53b819eabf7ab18d22f59e (diff)
Add latest changes from gitlab-org/gitlab@14-6-stable-ee (tag: v14.6.0-rc42)
Diffstat (limited to 'spec/lib/bulk_imports')
 spec/lib/bulk_imports/clients/http_spec.rb                                      |   4
 spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb                  |  96
 spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb                  |  10
 spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb              |  10
 spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb                 | 127
 spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb                      |  36
 spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb                  | 116
 spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb            |  77
 spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb                      |  22
 spec/lib/bulk_imports/groups/stage_spec.rb                                      |   8
 spec/lib/bulk_imports/ndjson_pipeline_spec.rb                                   |  26
 spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb     |  58
 spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb           |  52
 spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb          | 176
 spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb | 40
 spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb    |  64
 spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb    | 159
 spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb       |  45
 spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb            |  11
 spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb  |  27
 spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb              | 119
 spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb   | 168
 spec/lib/bulk_imports/projects/stage_spec.rb                                    |  12
 23 files changed, 1171 insertions(+), 292 deletions(-)
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 623f9aa453a..1bbc96af8ee 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -38,11 +38,11 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'when response is not success' do
it 'raises BulkImports::Error' do
- response_double = double(code: 503, success?: false)
+ response_double = double(code: 503, success?: false, request: double(path: double(path: '/test')))
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test')
end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb
new file mode 100644
index 00000000000..6c5465c8a66
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::BadgesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
+
+ let(:entity) { create(:bulk_import_entity, group: group) }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ let(:first_page) { extracted_data(has_next_page: true) }
+ let(:last_page) { extracted_data(name: 'badge2') }
+
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(first_page, last_page)
+ end
+ end
+
+ it 'imports a group badge' do
+ expect { pipeline.run }.to change(Badge, :count).by(2)
+
+ badge = group.badges.last
+
+ expect(badge.name).to eq('badge2')
+ expect(badge.link_url).to eq(badge_data['link_url'])
+ expect(badge.image_url).to eq(badge_data['image_url'])
+ end
+
+ context 'when project entity' do
+ let(:first_page) { extracted_data(has_next_page: true) }
+ let(:last_page) { extracted_data(name: 'badge2', kind: 'project') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+
+ it 'imports a project badge & skips group badge' do
+ expect { pipeline.run }.to change(Badge, :count).by(1)
+
+ badge = project.badges.last
+
+ expect(badge.name).to eq('badge2')
+ expect(badge.link_url).to eq(badge_data['link_url'])
+ expect(badge.image_url).to eq(badge_data['image_url'])
+ expect(badge.type).to eq('ProjectBadge')
+ end
+ end
+
+ describe '#transform' do
+ it 'returns transformed badge hash' do
+ badge = subject.transform(context, badge_data)
+
+ expect(badge[:name]).to eq('badge')
+ expect(badge[:link_url]).to eq(badge_data['link_url'])
+ expect(badge[:image_url]).to eq(badge_data['image_url'])
+ expect(badge.keys).to contain_exactly(:name, :link_url, :image_url)
+ end
+
+ context 'when data is blank' do
+ it 'does nothing when the data is blank' do
+ expect(subject.transform(context, nil)).to be_nil
+ end
+ end
+
+ context 'when project entity & group badge' do
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+
+ it 'returns' do
+ expect(subject.transform(context, { 'name' => 'test', 'kind' => 'group' })).to be_nil
+ end
+ end
+ end
+
+ def badge_data(name = 'badge', kind = 'group')
+ {
+ 'name' => name,
+ 'link_url' => 'https://gitlab.example.com',
+ 'image_url' => 'https://gitlab.example.com/image.png',
+ 'kind' => kind
+ }
+ end
+
+ def extracted_data(name: 'badge', kind: 'group', has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? '2' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name, kind)], page_info: page_info)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
index 9e3a6d5b8df..48db24def48 100644
--- a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb
@@ -59,16 +59,6 @@ RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do
end
end
- context 'when label is persisted' do
- it 'does not save label' do
- label = create(:group_label, group: group)
-
- expect(label).not_to receive(:save!)
-
- subject.load(context, label)
- end
- end
-
context 'when label is missing' do
it 'returns' do
expect(subject.load(context, nil)).to be_nil
diff --git a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
index 9f71175f46f..902b29bc365 100644
--- a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
@@ -81,16 +81,6 @@ RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do
end
end
- context 'when milestone is persisted' do
- it 'does not save milestone' do
- milestone = create(:milestone, group: group)
-
- expect(milestone).not_to receive(:save!)
-
- subject.load(context, milestone)
- end
- end
-
context 'when milestone is missing' do
it 'returns' do
expect(subject.load(context, nil)).to be_nil
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index a3cc866a406..0f6238e10dc 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:project) { create(:project) }
- let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
- let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let_it_be(:group) { create(:group) }
+
+ let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
+ let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject(:pipeline) { described_class.new(context) }
@@ -24,57 +25,101 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
- describe '#run' do
- it 'imports uploads into destination portable and removes tmpdir' do
- allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
- allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ shared_examples 'uploads import' do
+ describe '#run' do
+ before do
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ end
- pipeline.run
+ it 'imports uploads into destination portable and removes tmpdir' do
+ pipeline.run
- expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
+ expect(portable.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
- expect(Dir.exist?(tmpdir)).to eq(false)
- end
- end
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
- describe '#extract' do
- it 'downloads & extracts upload paths' do
- allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
- expect(pipeline).to receive(:untar_zxf)
- file_download_service = instance_double("BulkImports::FileDownloadService")
+ context 'when importing avatar' do
+ let(:uploads_dir_path) { File.join(tmpdir, 'avatar') }
- expect(BulkImports::FileDownloadService)
- .to receive(:new)
- .with(
- configuration: context.configuration,
- relative_url: "/projects/test/export_relations/download?relation=uploads",
- dir: tmpdir,
- filename: 'uploads.tar.gz')
- .and_return(file_download_service)
+ it 'imports avatar' do
+ FileUtils.touch(File.join(uploads_dir_path, 'avatar.png'))
- expect(file_download_service).to receive(:execute)
+ expect_next_instance_of(entity.update_service) do |service|
+ expect(service).to receive(:execute)
+ end
- extracted_data = pipeline.extract(context)
+ pipeline.run
+ end
- expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
- end
- end
+ context 'when something goes wrong' do
+ it 'raises exception' do
+ allow_next_instance_of(entity.update_service) do |service|
+ allow(service).to receive(:execute).and_return(nil)
+ end
+
+ pipeline.run
- describe '#load' do
- it 'creates a file upload' do
- expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1)
+ expect(entity.failures.first.exception_class).to include('AvatarLoadingError')
+ end
+ end
+ end
end
- context 'when dynamic path is nil' do
- it 'returns' do
- expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count }
+ describe '#extract' do
+ it 'downloads & extracts upload paths' do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ expect(pipeline).to receive(:untar_zxf)
+ file_download_service = instance_double("BulkImports::FileDownloadService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads",
+ dir: tmpdir,
+ filename: 'uploads.tar.gz')
+ .and_return(file_download_service)
+
+ expect(file_download_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
end
end
- context 'when path is a directory' do
- it 'returns' do
- expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count }
+ describe '#load' do
+ it 'creates a file upload' do
+ expect { pipeline.load(context, upload_file_path) }.to change { portable.uploads.count }.by(1)
+ end
+
+ context 'when dynamic path is nil' do
+ it 'returns' do
+ expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { portable.uploads.count }
+ end
+ end
+
+ context 'when path is a directory' do
+ it 'returns' do
+ expect { pipeline.load(context, uploads_dir_path) }.not_to change { portable.uploads.count }
+ end
end
end
end
+
+ context 'when importing to group' do
+ let(:portable) { group }
+ let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
+
+ context 'when importing to project' do
+ let(:portable) { project }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
end
diff --git a/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb
new file mode 100644
index 00000000000..0a04c0a2243
--- /dev/null
+++ b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Rest::GetBadgesQuery do
+ describe '.to_h' do
+ shared_examples 'resource and page info query' do
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:encoded_full_path) { ERB::Util.url_encode(entity.source_full_path) }
+
+ it 'returns correct query and page info' do
+ expected = {
+ resource: [entity.pluralized_name, encoded_full_path, 'badges'].join('/'),
+ query: {
+ page: context.tracker.next_page
+ }
+ }
+
+ expect(described_class.to_h(context)).to eq(expected)
+ end
+ end
+
+ context 'when entity is group' do
+ let(:entity) { create(:bulk_import_entity) }
+
+ include_examples 'resource and page info query'
+ end
+
+ context 'when entity is project' do
+ let(:entity) { create(:bulk_import_entity, :project_entity) }
+
+ include_examples 'resource and page info query'
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
deleted file mode 100644
index 9fa35c4707d..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
+++ /dev/null
@@ -1,116 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::BadgesPipeline do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
-
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
- destination_namespace: group.full_path,
- group: group
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'imports a group badge' do
- first_page = extracted_data(has_next_page: true)
- last_page = extracted_data(name: 'badge2')
-
- allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor|
- allow(extractor)
- .to receive(:extract)
- .and_return(first_page, last_page)
- end
-
- expect { subject.run }.to change(Badge, :count).by(2)
-
- badge = group.badges.last
-
- expect(badge.name).to eq('badge2')
- expect(badge.link_url).to eq(badge_data['link_url'])
- expect(badge.image_url).to eq(badge_data['image_url'])
- end
-
- describe '#load' do
- it 'creates a badge' do
- expect { subject.load(context, badge_data) }.to change(Badge, :count).by(1)
-
- badge = group.badges.first
-
- badge_data.each do |key, value|
- expect(badge[key]).to eq(value)
- end
- end
-
- it 'does nothing when the data is blank' do
- expect { subject.load(context, nil) }.not_to change(Badge, :count)
- end
- end
-
- describe '#transform' do
- it 'return transformed badge hash' do
- badge = subject.transform(context, badge_data)
-
- expect(badge[:name]).to eq('badge')
- expect(badge[:link_url]).to eq(badge_data['link_url'])
- expect(badge[:image_url]).to eq(badge_data['image_url'])
- expect(badge.keys).to contain_exactly(:name, :link_url, :image_url)
- end
-
- context 'when data is blank' do
- it 'does nothing when the data is blank' do
- expect(subject.transform(context, nil)).to be_nil
- end
- end
- end
-
- describe 'pipeline parts' do
- it { expect(described_class).to include_module(BulkImports::Pipeline) }
- it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
-
- it 'has extractors' do
- expect(described_class.get_extractor)
- .to eq(
- klass: BulkImports::Common::Extractors::RestExtractor,
- options: {
- query: BulkImports::Groups::Rest::GetBadgesQuery
- }
- )
- end
-
- it 'has transformers' do
- expect(described_class.transformers)
- .to contain_exactly(
- { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
- )
- end
- end
-
- def badge_data(name = 'badge')
- {
- 'name' => name,
- 'link_url' => 'https://gitlab.example.com',
- 'image_url' => 'https://gitlab.example.com/image.png'
- }
- end
-
- def extracted_data(name: 'badge', has_next_page: false)
- page_info = {
- 'has_next_page' => has_next_page,
- 'next_page' => has_next_page ? '2' : nil
- }
-
- BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name)], page_info: page_info)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
deleted file mode 100644
index c68284aa580..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- group: group,
- bulk_import: bulk_import,
- source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
- destination_namespace: group.full_path
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'updates the group avatar' do
- avatar_path = 'spec/fixtures/dk.png'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect { subject.run }.to change(context.group, :avatar)
-
- expect(context.group.avatar.filename).to eq(File.basename(avatar_path))
- end
-
- it 'raises an error when the avatar upload fails' do
- avatar_path = 'spec/fixtures/aosp_manifest.xml'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error)
- .with(
- bulk_import_id: context.bulk_import.id,
- bulk_import_entity_id: context.entity.id,
- bulk_import_entity_type: context.entity.source_type,
- context_extra: context.extra,
- exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError",
- exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon",
- pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline",
- pipeline_step: :loader
- )
- end
-
- expect { subject.run }.to change(BulkImports::Failure, :count)
- end
- end
-
- def stub_file_download(filepath = 'file/path.png', **params)
- expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader|
- expect(downloader).to receive(:execute).and_return(filepath)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb
deleted file mode 100644
index eef6848e118..00000000000
--- a/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Rest::GetBadgesQuery do
- describe '.to_h' do
- it 'returns query resource and page info' do
- entity = create(:bulk_import_entity)
- tracker = create(:bulk_import_tracker, entity: entity)
- context = BulkImports::Pipeline::Context.new(tracker)
- encoded_full_path = ERB::Util.url_encode(entity.source_full_path)
- expected = {
- resource: ['groups', encoded_full_path, 'badges'].join('/'),
- query: {
- page: context.tracker.next_page
- }
- }
-
- expect(described_class.to_h(context)).to eq(expected)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 5719acac4d7..55a8e40f480 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -8,13 +8,13 @@ RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::MilestonesPipeline],
- [1, BulkImports::Groups::Pipelines::BadgesPipeline],
- [2, BulkImports::Common::Pipelines::BoardsPipeline]
+ [1, BulkImports::Common::Pipelines::BadgesPipeline],
+ [2, BulkImports::Common::Pipelines::BoardsPipeline],
+ [2, BulkImports::Common::Pipelines::UploadsPipeline]
]
end
@@ -24,7 +24,7 @@ RSpec.describe BulkImports::Groups::Stage do
describe '.pipelines' do
it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.new(bulk_import).pipelines & pipelines).to eq(pipelines)
+ expect(described_class.new(bulk_import).pipelines & pipelines).to contain_exactly(*pipelines)
expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
index c5197fb29d9..8ea6ceb7619 100644
--- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -130,6 +130,22 @@ RSpec.describe BulkImports::NdjsonPipeline do
subject.transform(context, data)
end
+
+ context 'when data is nil' do
+ before do
+ expect(Gitlab::ImportExport::Group::RelationFactory).not_to receive(:create)
+ end
+
+ it 'returns' do
+ expect(subject.transform(nil, nil)).to be_nil
+ end
+
+ context 'when relation hash is nil' do
+ it 'returns' do
+ expect(subject.transform(nil, [nil, 0])).to be_nil
+ end
+ end
+ end
end
describe '#load' do
@@ -143,16 +159,6 @@ RSpec.describe BulkImports::NdjsonPipeline do
end
end
- context 'when object is persisted' do
- it 'does not save the object' do
- object = double(persisted?: true)
-
- expect(object).not_to receive(:save!)
-
- subject.load(nil, object)
- end
- end
-
context 'when object is missing' do
it 'returns' do
expect(subject.load(nil, nil)).to be_nil
diff --git a/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
new file mode 100644
index 00000000000..b680fa5cbfc
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Graphql::GetSnippetRepositoryQuery do
+ describe 'query repository based on full_path' do
+ let_it_be(:entity) { create(:bulk_import_entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ it 'has a valid query' do
+ query = GraphQL::Query.new(
+ GitlabSchema,
+ described_class.to_s,
+ variables: described_class.variables(context)
+ )
+ result = GitlabSchema.static_validator.validate(query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ it 'returns snippet httpUrlToRepo' do
+ expect(described_class.to_s).to include('httpUrlToRepo')
+ end
+
+ it 'returns snippet createdAt' do
+ expect(described_class.to_s).to include('createdAt')
+ end
+
+ it 'returns snippet title' do
+ expect(described_class.to_s).to include('title')
+ end
+
+ describe '.variables' do
+ it 'queries project based on source_full_path and pagination' do
+ expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 }
+
+ expect(described_class.variables(context)).to eq(expected)
+ end
+ end
+
+ describe '.data_path' do
+ it 'returns data path' do
+ expected = %w[data project snippets nodes]
+
+ expect(described_class.data_path).to eq(expected)
+ end
+ end
+
+ describe '.page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data project snippets page_info]
+
+ expect(described_class.page_info_path).to eq(expected)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb
new file mode 100644
index 00000000000..e2744a6a457
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:auto_devops) do
+ {
+ 'created_at' => '2016-06-13T15:02:47.967Z',
+ 'updated_at' => '2016-06-14T15:02:47.967Z',
+ 'enabled' => true,
+ 'deploy_strategy' => 'continuous'
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports auto devops options into destination project' do
+ group.add_owner(user)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [auto_devops]))
+ end
+
+ pipeline.run
+
+ expect(project.auto_devops.enabled).to be_truthy
+ expect(project.auto_devops.deploy_strategy).to eq('continuous')
+ expect(project.auto_devops.created_at).to eq('2016-06-13T15:02:47.967Z')
+ expect(project.auto_devops.updated_at).to eq('2016-06-14T15:02:47.967Z')
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
new file mode 100644
index 00000000000..98a2e8b6a57
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let(:ci_pipeline_attributes) { {} }
+ let(:ci_pipeline) do
+ {
+ sha: "fakesha",
+ ref: "fakeref",
+ project: project,
+ source: "web"
+ }.merge(ci_pipeline_attributes)
+ end
+
+ let(:ci_pipeline2) do
+ {
+ sha: "fakesha2",
+ ref: "fakeref2",
+ project: project,
+ source: "web"
+ }.merge(ci_pipeline_attributes)
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ group.add_owner(user)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(
+ BulkImports::Pipeline::ExtractedData.new(data: [ci_pipeline, ci_pipeline2])
+ )
+ end
+
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_source_branch!)
+ end
+
+ pipeline.run
+ end
+
+ it 'imports Ci::Pipeline into destination project' do
+ expect(project.all_pipelines.count).to eq(2)
+ expect(project.ci_pipelines.first.sha).to eq('fakesha')
+ expect(project.ci_pipelines.second.sha).to eq('fakesha2')
+ end
+
+ context 'notes' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'notes' => [
+ {
+ 'note' => 'test note',
+ 'author_id' => 22,
+ 'noteable_type' => 'Commit',
+ 'sha' => '',
+ 'author' => {
+ 'name' => 'User 22'
+ },
+ 'commit_id' => 'fakesha',
+ 'updated_at' => '2016-06-14T15:02:47.770Z',
+ 'events' => [
+ {
+ 'action' => 'created',
+ 'author_id' => 22
+ }
+ ]
+ }
+ ]
+ }
+ end
+
+ it 'imports pipeline with notes' do
+ note = project.all_pipelines.first.notes.first
+ expect(note.note).to include('test note')
+ expect(note.events.first.action).to eq('created')
+ end
+ end
+
+ context 'stages' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'stages' => [
+ {
+ 'name' => 'test stage',
+ 'statuses' => [
+ {
+ 'name' => 'first status',
+ 'status' => 'created'
+ }
+ ]
+ }
+ ]
+ }
+ end
+
+ it 'imports pipeline with stages' do
+ stage = project.all_pipelines.first.stages.first
+ expect(stage.name).to eq('test stage')
+ expect(stage.statuses.first.name).to eq('first status')
+ end
+ end
+
+ context 'external pull request' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'source' => 'external_pull_request_event',
+ 'external_pull_request' => {
+ 'source_branch' => 'test source branch',
+ 'target_branch' => 'master',
+ 'source_sha' => 'testsha',
+ 'target_sha' => 'targetsha',
+ 'source_repository' => 'test repository',
+ 'target_repository' => 'test repository',
+ 'status' => 1,
+ 'pull_request_iid' => 1
+ }
+ }
+ end
+
+ it 'imports pipeline with external pull request' do
+ pull_request = project.all_pipelines.first.external_pull_request
+ expect(pull_request.source_branch).to eq('test source branch')
+ expect(pull_request.status).to eq('open')
+ end
+ end
+
+ context 'merge request' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'source' => 'merge_request_event',
+ 'merge_request' => {
+ 'description' => 'test merge request',
+ 'title' => 'test MR',
+ 'source_branch' => 'test source branch',
+ 'target_branch' => 'master',
+ 'source_sha' => 'testsha',
+ 'target_sha' => 'targetsha',
+ 'source_repository' => 'test repository',
+ 'target_repository' => 'test repository',
+ 'target_project_id' => project.id,
+ 'source_project_id' => project.id,
+ 'author_id' => user.id
+ }
+ }
+ end
+
+ it 'imports pipeline with merge request' do
+ merge_request = project.all_pipelines.first.merge_request
+ expect(merge_request.source_branch).to eq('test source branch')
+ expect(merge_request.description).to eq('test merge request')
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
new file mode 100644
index 00000000000..9dac8e45ef9
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let_it_be(:policy) do
+ {
+ 'created_at' => '2019-12-13 13:45:04 UTC',
+ 'updated_at' => '2019-12-14 13:45:04 UTC',
+ 'next_run_at' => '2019-12-15 13:45:04 UTC',
+ 'name_regex' => 'test',
+ 'name_regex_keep' => 'regex_keep',
+ 'cadence' => '3month',
+ 'older_than' => '1month',
+ 'keep_n' => 100,
+ 'enabled' => true
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports container expiration policy', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[policy, 0]]))
+ end
+
+ pipeline.run
+
+ policy.each_pair do |key, value|
+ expect(entity.project.container_expiration_policy.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
new file mode 100644
index 00000000000..12713f008bb
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:schedule_attributes) { {} }
+ let(:schedule) do
+ {
+ 'description' => 'test pipeline schedule',
+ 'cron' => '1 1 1 1 1',
+ 'cron_timezone' => 'UTC',
+ 'ref' => 'testref',
+ 'created_at' => '2016-06-13T15:02:47.967Z',
+ 'updated_at' => '2016-06-14T15:02:47.967Z'
+ }.merge(schedule_attributes)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ group.add_owner(user)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [schedule]))
+ end
+
+ pipeline.run
+ end
+
+ it 'imports schedule into destination project' do
+ expect(project.pipeline_schedules.count).to eq(1)
+ pipeline_schedule = project.pipeline_schedules.first
+ schedule.each do |k, v|
+ expect(pipeline_schedule.send(k)).to eq(v)
+ end
+ end
+
+ context 'when schedule is active' do
+ let(:schedule_attributes) { { 'active' => true } }
+
+ it 'imports the schedule with active set to false' do
+ expect(project.pipeline_schedules.first.active).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
new file mode 100644
index 00000000000..11c475318bb
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:extra) { {} }
+ let(:project_attributes) do
+ {
+ 'description' => 'description',
+ 'visibility_level' => 0,
+ 'archived' => false,
+ 'merge_requests_template' => 'test',
+ 'merge_requests_rebase_enabled' => true,
+ 'approvals_before_merge' => 0,
+ 'reset_approvals_on_push' => true,
+ 'merge_requests_ff_only_enabled' => true,
+ 'issues_template' => 'test',
+ 'shared_runners_enabled' => true,
+ 'build_coverage_regex' => 'build_coverage_regex',
+ 'build_allow_git_fetch' => true,
+ 'build_timeout' => 3600,
+ 'pending_delete' => false,
+ 'public_builds' => true,
+ 'last_repository_check_failed' => nil,
+ 'only_allow_merge_if_pipeline_succeeds' => true,
+ 'has_external_issue_tracker' => false,
+ 'request_access_enabled' => true,
+ 'has_external_wiki' => false,
+ 'ci_config_path' => nil,
+ 'only_allow_merge_if_all_discussions_are_resolved' => true,
+ 'printing_merge_request_link_enabled' => true,
+ 'auto_cancel_pending_pipelines' => 'enabled',
+ 'service_desk_enabled' => false,
+ 'delete_error' => nil,
+ 'disable_overriding_approvers_per_merge_request' => true,
+ 'resolve_outdated_diff_discussions' => true,
+ 'jobs_cache_index' => nil,
+ 'external_authorization_classification_label' => nil,
+ 'pages_https_only' => false,
+ 'merge_requests_author_approval' => false,
+ 'merge_requests_disable_committers_approval' => true,
+ 'require_password_to_approve' => true,
+ 'remove_source_branch_after_merge' => true,
+ 'autoclose_referenced_issues' => true,
+ 'suggestion_commit_message' => 'Test!'
+ }.merge(extra)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ before do
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
+
+ pipeline.run
+ end
+
+ it 'imports project attributes', :aggregate_failures do
+ project_attributes.each_pair do |key, value|
+ expect(project.public_send(key)).to eq(value)
+ end
+ end
+
+ context 'when project is archived' do
+ let(:extra) { { 'archived' => true } }
+
+ it 'sets project as archived' do
+ expect(project.archived).to eq(true)
+ end
+ end
+ end
+
+ describe '#extract' do
+ before do
+ file_download_service = instance_double("BulkImports::FileDownloadService")
+ file_decompression_service = instance_double("BulkImports::FileDecompressionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
+ dir: tmpdir,
+ filename: 'self.json.gz')
+ .and_return(file_download_service)
+
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(dir: tmpdir, filename: 'self.json.gz')
+ .and_return(file_decompression_service)
+
+ expect(file_download_service).to receive(:execute)
+ expect(file_decompression_service).to receive(:execute)
+ end
+
+ it 'downloads, decompresses & decodes json' do
+ allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
+ end
+
+ context 'when json parsing error occurs' do
+ it 'raises an error' do
+ allow(pipeline).to receive(:json_attributes).and_return("invalid")
+
+ expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
+ end
+ end
+ end
+
+ describe '#transform' do
+ it 'removes prohibited attributes from hash' do
+ input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
+
+ expect(Gitlab::ImportExport::AttributeCleaner).to receive(:clean).and_call_original
+
+ expect(pipeline.transform(context, input)).to eq({ 'description' => 'description' })
+ end
+ end
+
+ describe '#load' do
+ it 'assigns attributes, drops visibility and reconciles shared runner setting' do
+ expect(project).to receive(:assign_attributes).with(project_attributes)
+ expect(project).to receive(:reconcile_shared_runners_setting!)
+ expect(project).to receive(:drop_visibility_level!)
+ expect(project).to receive(:save!)
+
+ pipeline.load(context, project_attributes)
+ end
+ end
+
+ describe '#json_attributes' do
+ it 'reads raw json from file' do
+ filepath = File.join(tmpdir, 'self.json')
+
+ FileUtils.touch(filepath)
+ expect_file_read(filepath)
+
+ pipeline.json_attributes
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb
new file mode 100644
index 00000000000..1f0defdd20c
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProjectFeaturePipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let_it_be(:project_feature) do
+ {
+ "builds_access_level": 10,
+ "wiki_access_level": 10,
+ "issues_access_level": 10,
+ "merge_requests_access_level": 10,
+ "snippets_access_level": 10,
+ "repository_access_level": 10,
+ "pages_access_level": 10,
+ "forking_access_level": 10,
+ "metrics_dashboard_access_level": 10,
+ "operations_access_level": 10,
+ "analytics_access_level": 10,
+ "security_and_compliance_access_level": 10,
+ "container_registry_access_level": 10,
+ "updated_at": "2016-09-23T11:58:28.000Z",
+ "created_at": "2014-12-26T09:26:45.000Z"
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports project feature', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[project_feature, 0]]))
+ end
+
+ pipeline.run
+
+ project_feature.each_pair do |key, value|
+ expect(entity.project.project_feature.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
index 583485faf8d..38b22538e70 100644
--- a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
@@ -47,6 +47,17 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do
end
end
+ context 'when project has no repository' do
+ let(:project_data) { { 'httpUrlToRepo' => '' } }
+
+ it 'skips repository import' do
+ expect(context.portable).not_to receive(:ensure_repository)
+ expect(context.portable.repository).not_to receive(:fetch_as_mirror)
+
+ pipeline.run
+ end
+ end
+
context 'blocked local networks' do
let(:project_data) { { 'httpUrlToRepo' => 'http://localhost/foo.git' } }
diff --git a/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb
new file mode 100644
index 00000000000..2dfa036fc48
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let_it_be(:setting) { { 'issue_template_key' => 'test', 'project_key' => 'key' } }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports service desk setting', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[setting, 0]]))
+ end
+
+ pipeline.run
+
+ setting.each_pair do |key, value|
+ expect(entity.project.service_desk_setting.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
new file mode 100644
index 00000000000..dae879de998
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:snippet_attributes) { {} }
+ let(:exported_snippet) do
+ {
+ 'id' => 25,
+ 'title' => 'Snippet with 2 files',
+ 'content' => 'content',
+ 'author_id' => 22,
+ 'project_id' => 6,
+ 'created_at' => '2021-10-28T20:21:59.712Z',
+ 'updated_at' => '2021-10-28T20:31:10.408Z',
+ 'file_name' => 'galactic_empire.rb',
+ 'visibility_level' => 0,
+ 'description' => 'How to track your Galactic armies.'
+ }.merge(snippet_attributes)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ group.add_owner(user)
+ snippet_with_index = [exported_snippet.dup, 0]
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [snippet_with_index]))
+ end
+
+ pipeline.run
+ end
+
+ it 'imports snippet into destination project' do
+ imported_snippet = project.snippets.last
+
+ expect(imported_snippet).to have_attributes(
+ title: exported_snippet['title'],
+ content: exported_snippet['content'],
+ author_id: user.id,
+ created_at: DateTime.parse(exported_snippet['created_at']),
+ updated_at: DateTime.parse(exported_snippet['updated_at']),
+ file_name: exported_snippet['file_name'],
+ visibility_level: exported_snippet['visibility_level'])
+ end
+
+ context 'with award_emoji' do
+ let(:snippet_attributes) { { 'award_emoji' => [expected_award] } }
+ let(:expected_award) do
+ {
+ 'id' => 580,
+ 'name' => 'rocket',
+ 'user_id' => 1,
+ 'awardable_type' => 'Snippet',
+ 'created_at' => '2021-10-28T20:30:25.802Z',
+ 'updated_at' => '2021-10-28T20:30:25.802Z'
+ }
+ end
+
+ it 'restores the award_emoji' do
+ snippet_award = project.snippets.first.award_emoji.first
+
+ expect(snippet_award).to have_attributes(
+ name: expected_award['name'],
+ user_id: user.id,
+ awardable_type: expected_award['awardable_type'],
+ created_at: DateTime.parse(expected_award['created_at']),
+ updated_at: DateTime.parse(expected_award['updated_at']))
+ end
+ end
+
+ context 'with notes', :freeze_time do
+ # To properly emulate a fixture that is expected to be read from a file, we dump a json
+ # object, then parse it right away. We expect some attrs, like Datetimes, to be
+ # converted to Strings.
+ let(:exported_snippet) { Gitlab::Json.parse(note.noteable.attributes.merge('notes' => notes).to_json) }
+ let(:note) { create(:note_on_project_snippet, :with_attachment) }
+ let(:notes) { [note.attributes.merge('author' => { 'name' => note.author.name })] }
+
+ it 'restores the notes' do
+ snippet_note = project.snippets.last.notes.first
+ author_name = note.author.name
+ note_updated_at = exported_snippet['notes'].first['updated_at'].split('.').first
+
+ expect(snippet_note).to have_attributes(
+ note: note.note + "\n\n *By #{author_name} on #{note_updated_at} (imported from GitLab)*",
+ noteable_type: note.noteable_type,
+ author_id: user.id,
+ updated_at: note.updated_at,
+ line_code: note.line_code,
+ commit_id: note.commit_id,
+ system: note.system,
+ st_diff: note.st_diff,
+ updated_by_id: user.id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
new file mode 100644
index 00000000000..9897e74ec7b
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:bulk_import) { create(:bulk_import, user: user) }
+ let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let!(:matched_snippet) { create(:snippet, project: project, created_at: "1981-12-13T23:59:59Z")}
+ let(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import_configuration.bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: project.full_path
+ )
+ end
+
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ let(:http_url_to_repo) { 'https://example.com/foo/bar/snippets/42.git' }
+ let(:data) do
+ [
+ {
+ 'title' => matched_snippet.title,
+ 'httpUrlToRepo' => http_url_to_repo,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ let(:page_info) do
+ {
+ 'next_page' => 'eyJpZCI6IjIyMDA2OTYifQ',
+ 'has_next_page' => false
+ }
+ end
+
+ let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) }
+
+ describe 'extractor' do
+ it 'is a GraphqlExtractor with Graphql::GetSnippetRepositoryQuery' do
+ expect(described_class.get_extractor).to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: {
+ query: BulkImports::Projects::Graphql::GetSnippetRepositoryQuery
+ })
+ end
+ end
+
+ describe '#run' do
+ let(:validation_response) { double(Hash, 'error?': false) }
+
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(extracted_data)
+ end
+
+ allow_next_instance_of(Snippets::RepositoryValidationService) do |repository_validation|
+ allow(repository_validation).to receive(:execute).and_return(validation_response)
+ end
+ end
+
+ shared_examples 'skippable snippet' do
+ it 'does not create snippet repo' do
+ pipeline.run
+
+ expect(Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists?).to be false
+ end
+ end
+
+ context 'when a snippet is not matched' do
+ let(:data) do
+ [
+ {
+ 'title' => 'unmatched title',
+ 'httpUrlToRepo' => http_url_to_repo,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when httpUrlToRepo is empty' do
+ let(:data) do
+ [
+ {
+ 'title' => matched_snippet.title,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when a snippet matches' do
+ context 'when snippet url is valid' do
+ it 'creates snippet repo' do
+ expect { pipeline.run }
+ .to change { Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists? }.to true
+ end
+
+ it 'updates snippets statistics' do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_as_mirror)
+ end
+
+ service = double(Snippets::UpdateStatisticsService)
+
+ expect(Snippets::UpdateStatisticsService).to receive(:new).with(kind_of(Snippet)).and_return(service)
+ expect(service).to receive(:execute)
+
+ pipeline.run
+ end
+
+ it 'fetches snippet repo from url' do
+ expect_next_instance_of(Repository) do |repository|
+ expect(repository)
+ .to receive(:fetch_as_mirror)
+ .with("https://oauth2:#{bulk_import_configuration.access_token}@example.com/foo/bar/snippets/42.git")
+ end
+
+ pipeline.run
+ end
+ end
+
+ context 'when url is invalid' do
+ let(:http_url_to_repo) { 'http://0.0.0.0' }
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when snippet is invalid' do
+ let(:validation_response) { double(Hash, 'error?': true) }
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_as_mirror)
+ end
+ end
+
+ it 'does not leave a hanging SnippetRepository behind' do
+ pipeline.run
+
+ expect(SnippetRepository.where(snippet_id: matched_snippet.id).exists?).to be false
+ end
+
+ it 'does not call UpdateStatisticsService' do
+ expect(Snippets::UpdateStatisticsService).not_to receive(:new)
+
+ pipeline.run
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index e7670085f60..81cbdcae9d1 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,20 +2,32 @@
require 'spec_helper'
+# Any new stages must be added to
+# `ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb` as well.
RSpec.describe BulkImports::Projects::Stage do
let(:pipelines) do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Projects::Pipelines::RepositoryPipeline],
+ [1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
[2, BulkImports::Common::Pipelines::LabelsPipeline],
[2, BulkImports::Common::Pipelines::MilestonesPipeline],
+ [2, BulkImports::Common::Pipelines::BadgesPipeline],
[3, BulkImports::Projects::Pipelines::IssuesPipeline],
+ [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
+ [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
[4, BulkImports::Common::Pipelines::BoardsPipeline],
[4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
[4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
[4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
+ [4, BulkImports::Projects::Pipelines::CiPipelinesPipeline],
+ [4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
+ [4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
+ [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
[5, BulkImports::Common::Pipelines::WikiPipeline],
[5, BulkImports::Common::Pipelines::UploadsPipeline],
+ [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
+ [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
[6, BulkImports::Common::Pipelines::EntityFinisher]
]
end