gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/lib/bulk_imports/common')
 spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb   |  15
 spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb      |  56
 spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb | 210
 spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb     | 161
 4 files changed, 436 insertions(+), 6 deletions(-)
diff --git a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
index 80607485b6e..50c54a7b47f 100644
--- a/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
+++ b/spec/lib/bulk_imports/common/extractors/graphql_extractor_spec.rb
@@ -8,12 +8,15 @@ RSpec.describe BulkImports::Common::Extractors::GraphqlExtractor do
let(:response) { double(original_hash: { 'data' => { 'foo' => 'bar' }, 'page_info' => {} }) }
let(:options) do
{
- query: double(
- to_s: 'test',
- variables: {},
- data_path: %w[data foo],
- page_info_path: %w[data page_info]
- )
+ query:
+ double(
+ new: double(
+ to_s: 'test',
+ variables: {},
+ data_path: %w[data foo],
+ page_info_path: %w[data page_info]
+ )
+ )
}
end
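
Note on the hunk above: the reshaped double tracks a behavior change in the extractor, which now receives a query class and instantiates it itself (hence the extra `new:` layer in the stub) instead of being handed a prebuilt query object. A minimal sketch of the consuming side under that assumption; everything beyond the method names asserted in the specs is hypothetical:

# Hypothetical sketch matching the double(new: double(...)) shape
# stubbed above; not the shipped extractor.
class SketchGraphqlExtractor
  def initialize(query:)
    @query_class = query # a class now, no longer a ready-made instance
  end

  def extract(context)
    query = @query_class.new(context: context)
    response = execute(query.to_s, query.variables).original_hash

    BulkImports::Pipeline::ExtractedData.new(
      data: response.dig(*query.data_path),
      page_info: response.dig(*query.page_info_path)
    )
  end

  private

  # illustrative stand-in for the real GraphQL client call
  def execute(query_string, variables)
    raise NotImplementedError
  end
end
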
diff --git a/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
new file mode 100644
index 00000000000..e3a7335a238
--- /dev/null
+++ b/spec/lib/bulk_imports/common/graphql/get_members_query_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Graphql::GetMembersQuery do
+ let(:entity) { create(:bulk_import_entity, :group_entity) }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:query) { described_class.new(context: context) }
+
+ it 'has a valid query' do
+ parsed_query = GraphQL::Query.new(
+ GitlabSchema,
+ query.to_s,
+ variables: query.variables
+ )
+ result = GitlabSchema.static_validator.validate(parsed_query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ describe '#data_path' do
+ it 'returns data path' do
+ expected = %w[data portable members nodes]
+
+ expect(query.data_path).to eq(expected)
+ end
+ end
+
+ describe '#page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data portable members page_info]
+
+ expect(query.page_info_path).to eq(expected)
+ end
+ end
+
+ describe '#to_s' do
+ context 'when entity is group' do
+ it 'queries group & group members' do
+ expect(query.to_s).to include('group')
+ expect(query.to_s).to include('groupMembers')
+ end
+ end
+
+ context 'when entity is project' do
+ let(:entity) { create(:bulk_import_entity, :project_entity) }
+
+ it 'queries project & project members' do
+ expect(query.to_s).to include('project')
+ expect(query.to_s).to include('projectMembers')
+ end
+ end
+ end
+end
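
The spec above pins down the public surface of GetMembersQuery (to_s, variables, data_path, page_info_path) without showing its body. A simplified sketch consistent with those expectations; the real class takes the pipeline context, so the standalone entity_type parameter and the exact field selection here are assumptions:

# Hypothetical, simplified sketch of the interface the spec asserts;
# the shipped class derives the entity type from its pipeline context.
class SketchGetMembersQuery
  def initialize(entity_type:) # :group or :project
    @entity_type = entity_type
  end

  def to_s
    portable = @entity_type == :group ? 'group' : 'project'

    <<~GRAPHQL
      query($full_path: ID!) {
        portable: #{portable}(fullPath: $full_path) {
          members: #{portable}Members {
            page_info: pageInfo { next_page: endCursor has_next_page: hasNextPage }
            nodes { created_at: createdAt user { public_email: publicEmail } }
          }
        }
      }
    GRAPHQL
  end

  def variables
    { full_path: 'group/to/import' } # the real class reads this from the context
  end

  def data_path
    %w[data portable members nodes]
  end

  def page_info_path
    %w[data portable members page_info]
  end
end
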
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
new file mode 100644
index 00000000000..b769aa4af5a
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -0,0 +1,210 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do
+ let_it_be(:portable) { create(:project) }
+ let_it_be(:oid) { 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' }
+
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:lfs_dir_path) { tmpdir }
+ let(:lfs_json_file_path) { File.join(lfs_dir_path, 'lfs_objects.json') }
+ let(:lfs_file_path) { File.join(lfs_dir_path, oid) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ FileUtils.mkdir_p(lfs_dir_path)
+ FileUtils.touch(lfs_json_file_path)
+ FileUtils.touch(lfs_file_path)
+ File.write(lfs_json_file_path, { oid => [0, 1, 2, nil] }.to_json)
+
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ it 'imports lfs objects into destination project and removes tmpdir' do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [lfs_json_file_path, lfs_file_path]))
+
+ pipeline.run
+
+ expect(portable.lfs_objects.count).to eq(1)
+ expect(portable.lfs_objects_projects.count).to eq(4)
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts lfs objects filepaths' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=lfs_objects",
+ tmpdir: tmpdir,
+ filename: 'lfs_objects.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService).to receive(:new).with(tmpdir: tmpdir, filename: 'lfs_objects.tar.gz').and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService).to receive(:new).with(tmpdir: tmpdir, filename: 'lfs_objects.tar').and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(lfs_json_file_path, lfs_file_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [lfs_json_file_path, lfs_file_path]))
+ end
+
+ context 'when file path is lfs json' do
+ it 'returns' do
+ filepath = File.join(tmpdir, 'lfs_objects.json')
+
+ allow(Gitlab::Json).to receive(:parse).with(filepath).and_return({})
+
+ expect { pipeline.load(context, filepath) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'when file path is tar file' do
+ it 'returns' do
+ filepath = File.join(tmpdir, 'lfs_objects.tar')
+
+ expect { pipeline.load(context, filepath) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'when lfs json read failed' do
+ it 'raises an error' do
+ File.write(lfs_json_file_path, 'invalid json')
+
+ expect { pipeline.load(context, lfs_file_path) }.to raise_error(BulkImports::Error, 'LFS Objects JSON read failed')
+ end
+ end
+
+ context 'when file path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+
+ context 'when file path is not under tmpdir' do
+ it 'raises an error' do
+ expect { pipeline.load(context, '/home/test.txt') }.to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when file path is symlink' do
+ it 'returns' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(lfs_file_path, symlink)
+
+ expect { pipeline.load(context, symlink) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'when path is a directory' do
+ it 'returns' do
+ expect { pipeline.load(context, Dir.tmpdir) }.not_to change { portable.lfs_objects.count }
+ end
+ end
+
+ context 'lfs objects project' do
+ context 'when lfs objects json is invalid' do
+ context 'when oid value is not Array' do
+ it 'does not create lfs objects project' do
+ File.write(lfs_json_file_path, { oid => 'test' }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
+ end
+ end
+
+ context 'when oid value is nil' do
+ it 'does not create lfs objects project' do
+ File.write(lfs_json_file_path, { oid => nil }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
+ end
+ end
+
+ context 'when oid value is not allowed' do
+ it 'does not create lfs objects project' do
+ File.write(lfs_json_file_path, { oid => ['invalid'] }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
+ end
+ end
+
+ context 'when repository type is duplicated' do
+ it 'creates only one lfs objects project' do
+ File.write(lfs_json_file_path, { oid => [0, 0, 1, 1, 2, 2] }.to_json)
+
+ expect { pipeline.load(context, lfs_file_path) }.to change { portable.lfs_objects_projects.count }.by(3)
+ end
+ end
+ end
+
+ context 'when lfs objects project fails to be created' do
+ it 'logs the failure' do
+ allow_next_instance_of(LfsObjectsProject) do |object|
+ allow(object).to receive(:persisted?).and_return(false)
+ end
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:warn)
+ .with(project_id: portable.id,
+ message: 'Failed to save lfs objects project',
+ errors: '', **Gitlab::ApplicationContext.current)
+ .exactly(4).times
+ end
+
+ pipeline.load(context, lfs_file_path)
+ end
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
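
The fixtures above fix the shape of the exported lfs_objects.json: each OID maps to an array of repository type codes (0, 1, 2, or nil for the main repository), and one lfs_objects_projects row is created per distinct valid code — hence 4 rows for [0, 1, 2, nil] and 3 for [0, 0, 1, 1, 2, 2]. A hedged reconstruction of the guard chain the #load contexts exercise; the ordering and helper calls are inferred from the expectations, not taken from the shipped pipeline:

# Hypothetical reconstruction of #load, inferred from the expectations above.
def sketch_load(portable, tmpdir, file_path)
  # the metadata file and the archive itself are skipped outright
  return if file_path.end_with?('lfs_objects.json', '.tar')

  # reject traversal attempts (raises Gitlab::Utils::PathTraversalAttackError)
  Gitlab::Utils.check_path_traversal!(file_path)

  # symlinks and directories are ignored
  return if File.symlink?(file_path) || File.directory?(file_path)

  # anything outside the extraction tmpdir is rejected
  raise StandardError, "path #{file_path} is not allowed" unless file_path.start_with?(tmpdir)

  lfs_json = begin
    Gitlab::Json.parse(File.read(File.join(tmpdir, 'lfs_objects.json')))
  rescue StandardError
    raise BulkImports::Error, 'LFS Objects JSON read failed'
  end

  oid = File.basename(file_path)
  lfs_object = LfsObject.create!(oid: oid, size: File.size(file_path), file: File.open(file_path))

  # one row per distinct repository type; nil maps to the main repository,
  # anything outside the repository_type enum is dropped
  Array(lfs_json[oid]).uniq.each do |repository_type|
    next unless repository_type.nil? || LfsObjectsProject.repository_types.value?(repository_type)

    portable.lfs_objects_projects.create(lfs_object: lfs_object, repository_type: repository_type)
  end
end
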
diff --git a/spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb
new file mode 100644
index 00000000000..f9b95f79104
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::MembersPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:member_user1) { create(:user, email: 'email1@email.com') }
+ let_it_be(:member_user2) { create(:user, email: 'email2@email.com') }
+ let_it_be(:member_data) do
+ {
+ user_id: member_user1.id,
+ created_by_id: member_user2.id,
+ access_level: 30,
+ created_at: '2020-01-01T00:00:00Z',
+ updated_at: '2020-01-01T00:00:00Z',
+ expires_at: nil
+ }
+ end
+
+ let(:parent) { create(:group) }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let(:members) { portable.members.map { |m| m.slice(:user_id, :access_level) } }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ def extracted_data(email:, has_next_page: false)
+ data = {
+ 'created_at' => '2020-01-01T00:00:00Z',
+ 'updated_at' => '2020-01-02T00:00:00Z',
+ 'expires_at' => nil,
+ 'access_level' => {
+ 'integer_value' => 30
+ },
+ 'user' => {
+ 'public_email' => email
+ }
+ }
+
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
+ end
+
+ shared_examples 'members import' do
+ before do
+ portable.members.delete_all
+ end
+
+ describe '#run' do
+ it 'creates memberships for existing users' do
+ first_page = extracted_data(email: member_user1.email, has_next_page: true)
+ last_page = extracted_data(email: member_user2.email)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(first_page, last_page)
+ end
+
+ expect { pipeline.run }.to change(portable.members, :count).by(2)
+
+ expect(members).to contain_exactly(
+ { user_id: member_user1.id, access_level: 30 },
+ { user_id: member_user2.id, access_level: 30 }
+ )
+ end
+ end
+
+ describe '#load' do
+ it 'creates new membership' do
+ expect { subject.load(context, member_data) }.to change(portable.members, :count).by(1)
+
+ member = portable.members.find_by_user_id(member_user1.id)
+
+ expect(member.user).to eq(member_user1)
+ expect(member.created_by).to eq(member_user2)
+ expect(member.access_level).to eq(30)
+ expect(member.created_at).to eq('2020-01-01T00:00:00Z')
+ expect(member.updated_at).to eq('2020-01-01T00:00:00Z')
+ expect(member.expires_at).to eq(nil)
+ end
+
+ context 'when user_id is current user id' do
+ it 'does not create new membership' do
+ data = { user_id: user.id }
+
+ expect { pipeline.load(context, data) }.not_to change(portable.members, :count)
+ end
+ end
+
+ context 'when data is nil' do
+ it 'does not create new membership' do
+ expect { pipeline.load(context, nil) }.not_to change(portable.members, :count)
+ end
+ end
+
+ context 'when user membership already exists with the same access level' do
+ it 'does not create new membership' do
+ portable.members.create!(member_data)
+
+ expect { pipeline.load(context, member_data) }.not_to change(portable.members, :count)
+ end
+ end
+
+ context 'when portable is in a parent group' do
+ let(:tracker) { create(:bulk_import_tracker, entity: entity_with_parent) }
+
+ before do
+ parent.members.create!(member_data)
+ end
+
+ context 'when the same membership exists in parent group' do
+ it 'does not create new membership' do
+ expect { pipeline.load(context, member_data) }.not_to change(portable_with_parent.members, :count)
+ end
+ end
+
+ context 'when the imported membership has a higher access level than the parent group membership' do
+ it 'creates new direct membership' do
+ data = member_data.merge(access_level: Gitlab::Access::MAINTAINER)
+
+ expect { pipeline.load(context, data) }.to change(portable_with_parent.members, :count)
+
+ member = portable_with_parent.members.find_by_user_id(member_user1.id)
+
+ expect(member.access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
+ context 'when the imported membership has a lower access level than the parent group membership' do
+ it 'does not create new membership' do
+ data = member_data.merge(access_level: Gitlab::Access::GUEST)
+
+ expect { pipeline.load(context, data) }.not_to change(portable_with_parent.members, :count)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when importing to group' do
+ let(:portable) { create(:group) }
+ let(:portable_with_parent) { create(:group, parent: parent) }
+ let(:entity) { create(:bulk_import_entity, :group_entity, group: portable, bulk_import: bulk_import) }
+ let(:entity_with_parent) { create(:bulk_import_entity, :group_entity, group: portable_with_parent, bulk_import: bulk_import) }
+
+ include_examples 'members import'
+ end
+
+ context 'when importing to project' do
+ let(:portable) { create(:project) }
+ let(:portable_with_parent) { create(:project, namespace: parent) }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, bulk_import: bulk_import) }
+ let(:entity_with_parent) { create(:bulk_import_entity, :project_entity, project: portable_with_parent, bulk_import: bulk_import) }
+
+ include_examples 'members import'
+ end
+end
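
Taken together, the #load contexts describe a guard chain: nil data is ignored, the importing user is skipped, and a direct membership is only written when no existing membership, direct or inherited from the parent group, already grants an equal or higher access level. A hedged sketch of that branching; member_for_user is a hypothetical helper, not shown in the diff:

# Hypothetical reconstruction of the guard chain exercised above.
def sketch_load(portable, current_user, data)
  return if data.nil?
  return if data[:user_id] == current_user.id # the importer already has access

  # any existing membership for this user, direct or inherited (hypothetical helper)
  existing = member_for_user(portable, data[:user_id])

  # an equal or higher existing access level wins; nothing to do
  return if existing && existing.access_level >= data[:access_level]

  # otherwise create (or override with) a direct membership
  portable.members.create!(data)
end
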