gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 14:10:13 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 14:10:13 +0300
commit    0ea3fcec397b69815975647f5e2aa5fe944a8486 (patch)
tree      7979381b89d26011bcf9bdc989a40fcc2f1ed4ff /spec/lib/bulk_imports
parent    72123183a20411a36d607d70b12d57c484394c8e (diff)

Add latest changes from gitlab-org/gitlab@15-1-stable-ee (tag: v15.1.0-rc42)
Diffstat (limited to 'spec/lib/bulk_imports')
-rw-r--r--  spec/lib/bulk_imports/groups/stage_spec.rb                                    |  93
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb      | 171
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb |  84
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb           |  40
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb  | 169
-rw-r--r--  spec/lib/bulk_imports/projects/stage_spec.rb                                  |  77
6 files changed, 497 insertions(+), 137 deletions(-)
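
The central change in both stage specs below: Stage#pipelines no longer returns [stage_number, PipelineClass] tuples but hashes keyed by :pipeline, :stage, and optional :minimum_source_version / :maximum_source_version, sorted by stage. A minimal, self-contained Ruby sketch of that assumed shape (the classes are stand-ins, not the real pipeline constants):

    # Stand-in classes; the real config uses BulkImports::* pipeline constants.
    GroupPipeline           = Class.new
    GroupAttributesPipeline = Class.new
    EntityFinisher          = Class.new

    config = {
      group:            { pipeline: GroupPipeline,           stage: 0 },
      group_attributes: { pipeline: GroupAttributesPipeline, stage: 1,
                          minimum_source_version: '15.0.0' },
      finisher:         { pipeline: EntityFinisher,          stage: 2 }
    }

    # #pipelines sorts the config values by :stage, so entries may be
    # declared out of order (see the 'stages are out of order' contexts).
    pipelines = config.values.sort_by { |entry| entry[:stage] }

    pipelines.each { |entry| puts "stage #{entry[:stage]}: #{entry[:pipeline]}" }
    # stage 0: GroupPipeline ... stage 2: EntityFinisher

The matchers change accordingly, from array membership to hash_including.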
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 8ce25ff87d7..528d65615b1 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -4,47 +4,86 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Stage do
let(:ancestor) { create(:group) }
- let(:group) { create(:group, parent: ancestor) }
+ let(:group) { build(:group, parent: ancestor) }
let(:bulk_import) { build(:bulk_import) }
- let(:entity) { build(:bulk_import_entity, bulk_import: bulk_import, group: group, destination_namespace: ancestor.full_path) }
-
- let(:pipelines) do
- [
- [0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::GroupAttributesPipeline],
- [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
- [1, BulkImports::Groups::Pipelines::NamespaceSettingsPipeline],
- [1, BulkImports::Common::Pipelines::MembersPipeline],
- [1, BulkImports::Common::Pipelines::LabelsPipeline],
- [1, BulkImports::Common::Pipelines::MilestonesPipeline],
- [1, BulkImports::Common::Pipelines::BadgesPipeline],
- [2, BulkImports::Common::Pipelines::BoardsPipeline],
- [2, BulkImports::Common::Pipelines::UploadsPipeline]
- ]
+ let(:entity) do
+ build(:bulk_import_entity, bulk_import: bulk_import, group: group, destination_namespace: ancestor.full_path)
end
it 'raises error when initialized without a BulkImport' do
- expect { described_class.new({}) }.to raise_error(ArgumentError, 'Expected an argument of type ::BulkImports::Entity')
+ expect { described_class.new({}) }.to raise_error(
+ ArgumentError, 'Expected an argument of type ::BulkImports::Entity'
+ )
end
- describe '.pipelines' do
- it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.new(entity).pipelines & pipelines).to contain_exactly(*pipelines)
- expect(described_class.new(entity).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
+ describe '#pipelines' do
+ it 'lists all the pipelines' do
+ pipelines = described_class.new(entity).pipelines
+
+ expect(pipelines).to include(
+ hash_including({
+ pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
+ stage: 0
+ }),
+ hash_including({
+ pipeline: BulkImports::Groups::Pipelines::GroupAttributesPipeline,
+ stage: 1
+ })
+ )
+ expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
+ end
+
+ it 'only has pipelines with valid keys' do
+ pipeline_keys = described_class.new(entity).pipelines.collect(&:keys).flatten.uniq
+ allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
+
+ expect(pipeline_keys - allowed_keys).to be_empty
+ end
+
+ it 'only has pipelines with valid versions' do
+ pipelines = described_class.new(entity).pipelines
+ minimum_source_versions = pipelines.collect { _1[:minimum_source_version] }.flatten.compact
+ maximum_source_versions = pipelines.collect { _1[:maximum_source_version] }.flatten.compact
+ version_regex = /^(\d+)\.(\d+)\.0$/
+
+ expect(minimum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ expect(maximum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ end
+
+ context 'when stages are out of order in the config hash' do
+ it 'lists all the pipelines ordered by stage' do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ a: { stage: 2 },
+ b: { stage: 1 },
+ c: { stage: 0 },
+ d: { stage: 2 }
+ }
+ )
+ end
+
+ expected_stages = described_class.new(entity).pipelines.collect { _1[:stage] }
+ expect(expected_stages).to eq([0, 1, 2, 2])
+ end
end
context 'when bulk_import_projects feature flag is enabled' do
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: true)
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
context 'when feature flag is enabled on root ancestor level' do
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: ancestor)
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
@@ -54,7 +93,9 @@ RSpec.describe BulkImports::Groups::Stage do
entity = create(:bulk_import_entity, destination_namespace: '')
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
end
@@ -63,7 +104,9 @@ RSpec.describe BulkImports::Groups::Stage do
it 'does not include project entities pipeline' do
stub_feature_flags(bulk_import_projects: false)
- expect(described_class.new(entity).pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline)
+ expect(described_class.new(entity).pipelines).not_to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
new file mode 100644
index 00000000000..39b539ece21
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do
+ let_it_be(:design) { create(:design, :with_file) }
+
+ let(:portable) { create(:project) }
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:design_bundle_path) { File.join(tmpdir, 'design.bundle') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ design.repository.bundle_to_disk(design_bundle_path)
+
+ allow(portable).to receive(:lfs_enabled?).and_return(true)
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ it 'imports design repository into destination project and removes tmpdir' do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [design_bundle_path]))
+
+ expect(portable.design_repository).to receive(:create_from_bundle).with(design_bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(portable.design_repository.exists?).to eq(true)
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts design bundle filepath' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=design",
+ tmpdir: tmpdir,
+ filename: 'design.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'design.tar.gz')
+ .and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'design.tar')
+ .and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(design_bundle_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [design_bundle_path]))
+ end
+
+ it 'creates design repository from bundle' do
+ expect(portable.design_repository).to receive(:create_from_bundle).with(design_bundle_path).and_call_original
+
+ pipeline.load(context, design_bundle_path)
+
+ expect(portable.design_repository.exists?).to eq(true)
+ end
+
+ context 'when lfs is disabled' do
+ it 'returns' do
+ allow(portable).to receive(:lfs_enabled?).and_return(false)
+
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, design_bundle_path)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when file does not exist' do
+ it 'returns' do
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, File.join(tmpdir, 'bogus'))
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is directory' do
+ it 'returns' do
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, tmpdir)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is symlink' do
+ it 'returns' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(File.join(tmpdir, design_bundle_path), symlink)
+
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, symlink)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is not under tmpdir' do
+ it 'returns' do
+ expect { pipeline.load(context, '/home/test.txt') }
+ .to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }
+ .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
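
The #load contexts above (and the identical ones in the repository bundle spec further down) pin down the path validation around create_from_bundle. A hedged, self-contained sketch of that guard logic, using hypothetical helper names rather than GitLab's internals: skip silently for a missing file, a directory, or a symlink; raise for traversal or for paths outside the import tmpdir.

    require 'fileutils'
    require 'tmpdir'

    # Hypothetical guard mirroring the spec expectations.
    def importable_bundle?(path, tmpdir)
      raise StandardError, 'Invalid path' if path.split(File::SEPARATOR).include?('..')
      raise StandardError, "path #{path} is not allowed" unless path.start_with?(tmpdir)

      File.file?(path) && !File.symlink?(path)
    end

    Dir.mktmpdir do |tmpdir|
      bundle = File.join(tmpdir, 'design.bundle')
      File.write(bundle, '')

      p importable_bundle?(bundle, tmpdir)                      # => true
      p importable_bundle?(File.join(tmpdir, 'bogus'), tmpdir)  # => false (missing)
      p importable_bundle?(tmpdir, tmpdir)                      # => false (directory)
    end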
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
index aa9c7486c27..4320d5dc119 100644
--- a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -54,17 +54,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
subject(:pipeline) { described_class.new(context) }
- before do
- allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
- end
-
- after do
- FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
- end
-
describe '#run' do
before do
- allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
+ allow_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(
+ BulkImports::Pipeline::ExtractedData.new(data: project_attributes)
+ )
+ end
pipeline.run
end
@@ -84,46 +80,6 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
end
end
- describe '#extract' do
- before do
- file_download_service = instance_double("BulkImports::FileDownloadService")
- file_decompression_service = instance_double("BulkImports::FileDecompressionService")
-
- expect(BulkImports::FileDownloadService)
- .to receive(:new)
- .with(
- configuration: context.configuration,
- relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
- tmpdir: tmpdir,
- filename: 'self.json.gz')
- .and_return(file_download_service)
-
- expect(BulkImports::FileDecompressionService)
- .to receive(:new)
- .with(tmpdir: tmpdir, filename: 'self.json.gz')
- .and_return(file_decompression_service)
-
- expect(file_download_service).to receive(:execute)
- expect(file_decompression_service).to receive(:execute)
- end
-
- it 'downloads, decompresses & decodes json' do
- allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
-
- extracted_data = pipeline.extract(context)
-
- expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
- end
-
- context 'when json parsing error occurs' do
- it 'raises an error' do
- allow(pipeline).to receive(:json_attributes).and_return("invalid")
-
- expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
- end
- end
- end
-
describe '#transform' do
it 'removes prohibited attributes from hash' do
input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
@@ -145,35 +101,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
end
end
- describe '#json_attributes' do
- it 'reads raw json from file' do
- filepath = File.join(tmpdir, 'self.json')
-
- FileUtils.touch(filepath)
- expect_file_read(filepath)
-
- pipeline.json_attributes
- end
- end
-
describe '#after_run' do
- it 'removes tmp dir' do
- allow(FileUtils).to receive(:remove_entry).and_call_original
- expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+ it 'calls extractor#remove_tmpdir' do
+ expect_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
+ expect(extractor).to receive(:remove_tmpdir)
+ end
pipeline.after_run(nil)
-
- expect(Dir.exist?(tmpdir)).to eq(false)
- end
-
- context 'when dir does not exist' do
- it 'does not attempt to remove tmpdir' do
- FileUtils.remove_entry(tmpdir)
-
- expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
-
- pipeline.after_run(nil)
- end
end
end
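
This hunk moves download and decompression out of the pipeline into BulkImports::Common::Extractors::JsonExtractor, which now also owns tmpdir cleanup. A rough sketch of the delegation pattern these specs assert, with illustrative class and method bodies only:

    require 'fileutils'
    require 'tmpdir'

    # Illustrative extractor: owns its tmpdir and the cleanup of it.
    class JsonExtractorSketch
      def initialize(relation:)
        @relation = relation
        @tmpdir = Dir.mktmpdir('bulk_imports')
      end

      def extract(_context)
        # The real extractor downloads self.json.gz and decompresses it here.
        [{ 'description' => 'demo project' }]
      end

      def remove_tmpdir
        FileUtils.remove_entry(@tmpdir) if Dir.exist?(@tmpdir)
      end
    end

    # The pipeline no longer touches files; #after_run just delegates.
    class AttributesPipelineSketch
      def initialize
        @extractor = JsonExtractorSketch.new(relation: 'self')
      end

      def run
        @extractor.extract(nil)
      end

      def after_run(_context)
        @extractor.remove_tmpdir
      end
    end

    pipeline = AttributesPipelineSketch.new
    p pipeline.run          # => [{"description"=>"demo project"}]
    pipeline.after_run(nil) # tmpdir removed by the extractor, not the pipeline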
diff --git a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
index 2279e66720e..2633598b48d 100644
--- a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
@@ -31,7 +31,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
'created_at' => '2019-12-26T10:17:14.621Z',
'updated_at' => '2019-12-26T10:17:14.621Z',
'released_at' => '2019-12-26T10:17:14.615Z',
- 'sha' => '901de3a8bd5573f4a049b1457d28bc1592ba6bf9'
+ 'sha' => '901de3a8bd5573f4a049b1457d28bc1592ba6bf9',
+ 'author_id' => user.id
}.merge(attributes)
end
@@ -45,11 +46,11 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [with_index]))
end
-
- pipeline.run
end
it 'imports release into destination project' do
+ pipeline.run
+
expect(project.releases.count).to eq(1)
imported_release = project.releases.last
@@ -62,6 +63,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
expect(imported_release.updated_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.released_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.sha).to eq(release['sha'])
+ expect(imported_release.author_id).to eq(release['author_id'])
end
end
@@ -78,6 +80,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
let(:attributes) {{ 'links' => [link] }}
it 'restores release links' do
+ pipeline.run
+
release_link = project.releases.last.links.first
aggregate_failures do
@@ -105,6 +109,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
let(:attributes) {{ 'milestone_releases' => [{ 'milestone' => milestone }] }}
it 'restores release milestone' do
+ pipeline.run
+
release_milestone = project.releases.last.milestone_releases.first.milestone
aggregate_failures do
@@ -118,5 +124,33 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
end
end
end
+
+ context 'evidences' do
+ it 'creates release evidence' do
+ expect(::Releases::CreateEvidenceWorker).to receive(:perform_async)
+
+ pipeline.run
+ end
+
+ context 'when release is historical' do
+ let(:attributes) {{ 'released_at' => '2018-12-26T10:17:14.621Z' }}
+
+ it 'does not create release evidence' do
+ expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
+
+ pipeline.run
+ end
+ end
+
+ context 'when release is upcoming' do
+ let(:attributes) {{ 'released_at' => Time.zone.now + 30.days }}
+
+ it 'does not create release evidence' do
+ expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
+
+ pipeline.run
+ end
+ end
+ end
end
end
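
The new 'evidences' contexts encode a time window: the evidence worker is queued only for releases that are neither historical nor upcoming. A hedged sketch of that rule as these examples exercise it (the predicate name and arguments are illustrative, not GitLab's API):

    require 'time'

    # Illustrative predicate: evidence is skipped for releases released
    # before their recorded created_at (historical) or in the future (upcoming).
    def queue_evidence?(released_at, created_at:, now: Time.now)
      return false if released_at < created_at # historical release
      return false if released_at > now        # upcoming release

      true
    end

    created = Time.parse('2019-12-26T10:17:14Z')
    p queue_evidence?(created, created_at: created)                            # => true
    p queue_evidence?(Time.parse('2018-12-26T10:17:14Z'), created_at: created) # => false
    p queue_evidence?(Time.now + (30 * 24 * 3600), created_at: created)        # => false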
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
new file mode 100644
index 00000000000..712c37ee578
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do
+ let_it_be(:source) { create(:project, :repository) }
+
+ let(:portable) { create(:project) }
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:bundle_path) { File.join(tmpdir, 'repository.bundle') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ source.repository.bundle_to_disk(bundle_path)
+
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ before do
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [bundle_path]))
+ end
+
+ it 'imports repository into destination project and removes tmpdir' do
+ expect(portable.repository).to receive(:create_from_bundle).with(bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(portable.repository.exists?).to eq(true)
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when something goes wrong during import' do
+ it 'marks entity as failed' do
+ allow(pipeline).to receive(:load).and_raise(StandardError)
+
+ pipeline.run
+
+ expect(entity.failed?).to eq(true)
+ end
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts repository bundle filepath' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=repository",
+ tmpdir: tmpdir,
+ filename: 'repository.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'repository.tar.gz')
+ .and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'repository.tar')
+ .and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(bundle_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [bundle_path]))
+ end
+
+ it 'creates repository from bundle' do
+ expect(portable.repository).to receive(:create_from_bundle).with(bundle_path).and_call_original
+
+ pipeline.load(context, bundle_path)
+
+ expect(portable.repository.exists?).to eq(true)
+ end
+
+ context 'when file does not exist' do
+ it 'returns' do
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, File.join(tmpdir, 'bogus'))
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is directory' do
+ it 'returns' do
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, tmpdir)
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is symlink' do
+ it 'returns' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(File.join(tmpdir, bundle_path), symlink)
+
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, symlink)
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is not under tmpdir' do
+ it 'returns' do
+ expect { pipeline.load(context, '/home/test.txt') }
+ .to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }
+ .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index e81d9cc5fb4..fc670d10655 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,38 +2,7 @@
require 'spec_helper'
-# Any new stages must be added to
-# `ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb` as well.
RSpec.describe BulkImports::Projects::Stage do
- let(:pipelines) do
- [
- [0, BulkImports::Projects::Pipelines::ProjectPipeline],
- [1, BulkImports::Projects::Pipelines::RepositoryPipeline],
- [1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
- [2, BulkImports::Common::Pipelines::LabelsPipeline],
- [2, BulkImports::Common::Pipelines::MilestonesPipeline],
- [2, BulkImports::Common::Pipelines::BadgesPipeline],
- [3, BulkImports::Projects::Pipelines::IssuesPipeline],
- [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
- [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
- [4, BulkImports::Common::Pipelines::BoardsPipeline],
- [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
- [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
- [4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
- [4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
- [4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
- [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
- [4, BulkImports::Projects::Pipelines::ReleasesPipeline],
- [5, BulkImports::Projects::Pipelines::CiPipelinesPipeline],
- [5, BulkImports::Common::Pipelines::WikiPipeline],
- [5, BulkImports::Common::Pipelines::UploadsPipeline],
- [5, BulkImports::Common::Pipelines::LfsObjectsPipeline],
- [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
- [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
- [6, BulkImports::Common::Pipelines::EntityFinisher]
- ]
- end
-
subject do
entity = build(:bulk_import_entity, :project_entity)
@@ -41,9 +10,49 @@ RSpec.describe BulkImports::Projects::Stage do
end
describe '#pipelines' do
- it 'list all the pipelines with their stage number, ordered by stage' do
- expect(subject.pipelines & pipelines).to contain_exactly(*pipelines)
- expect(subject.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
+ it 'lists all the pipelines' do
+ pipelines = subject.pipelines
+
+ expect(pipelines).to include(
+ hash_including({ stage: 0, pipeline: BulkImports::Projects::Pipelines::ProjectPipeline }),
+ hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline })
+ )
+ expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
+ end
+
+ it 'only has pipelines with valid keys' do
+ pipeline_keys = subject.pipelines.collect(&:keys).flatten.uniq
+ allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
+
+ expect(pipeline_keys - allowed_keys).to be_empty
+ end
+
+ it 'only has pipelines with valid versions' do
+ pipelines = subject.pipelines
+ minimum_source_versions = pipelines.collect { _1[:minimum_source_version] }.flatten.compact
+ maximum_source_versions = pipelines.collect { _1[:maximum_source_version] }.flatten.compact
+ version_regex = /^(\d+)\.(\d+)\.0$/
+
+ expect(minimum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ expect(maximum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ end
+
+ context 'when stages are out of order in the config hash' do
+ it 'lists all the pipelines ordered by stage' do
+ allow_next_instance_of(BulkImports::Projects::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ a: { stage: 2 },
+ b: { stage: 1 },
+ c: { stage: 0 },
+ d: { stage: 2 }
+ }
+ )
+ end
+
+ expected_stages = subject.pipelines.collect { _1[:stage] }
+ expect(expected_stages).to eq([0, 1, 2, 2])
+ end
end
end
end