From 311b0269b4eb9839fa63f80c8d7a58f32b8138a0 Mon Sep 17 00:00:00 2001
From: GitLab Bot
Date: Thu, 18 Nov 2021 13:16:36 +0000
Subject: Add latest changes from gitlab-org/gitlab@14-5-stable-ee

---
 .../common/pipelines/milestones_pipeline_spec.rb   | 154 +++++++++++
 .../common/pipelines/uploads_pipeline_spec.rb      |  80 ++++++
 .../common/pipelines/wiki_pipeline_spec.rb         |  25 ++
 .../groups/graphql/get_milestones_query_spec.rb    |  35 ---
 .../groups/loaders/group_loader_spec.rb            |  58 +++-
 .../groups/pipelines/milestones_pipeline_spec.rb   |  73 -----
 spec/lib/bulk_imports/groups/stage_spec.rb         |   2 +-
 spec/lib/bulk_imports/ndjson_pipeline_spec.rb      |   3 +
 .../external_pull_requests_pipeline_spec.rb        |  66 +++++
 .../pipelines/merge_requests_pipeline_spec.rb      | 297 +++++++++++++++++++++
 .../pipelines/protected_branches_pipeline_spec.rb  |  61 +++++
 .../projects/pipelines/repository_pipeline_spec.rb |  97 +++----
 spec/lib/bulk_imports/projects/stage_spec.rb       |  11 +-
 13 files changed, 797 insertions(+), 165 deletions(-)
 create mode 100644 spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
 create mode 100644 spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
 create mode 100644 spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb
 delete mode 100644 spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
 delete mode 100644 spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
 create mode 100644 spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
 create mode 100644 spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb
 create mode 100644 spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb

diff --git a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
new file mode 100644
index 00000000000..9f71175f46f
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do
+  let(:user) { create(:user) }
+  let(:group) { create(:group) }
+  let(:bulk_import) { create(:bulk_import, user: user) }
+  let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+  let(:source_project_id) { nil } # if set, then exported_milestone is a project milestone
+  let(:source_group_id) { nil } # if set, then exported_milestone is a group milestone
+  let(:exported_milestone_for_project) do
+    exported_milestone_for_group.merge(
+      'events' => [{
+        'project_id' => source_project_id,
+        'author_id' => 9,
+        'created_at' => "2021-08-12T19:12:49.810Z",
+        'updated_at' => "2021-08-12T19:12:49.810Z",
+        'target_type' => "Milestone",
+        'group_id' => source_group_id,
+        'fingerprint' => 'f270eb9b27d0',
+        'id' => 66,
+        'action' => "created"
+      }]
+    )
+  end
+
+  let(:exported_milestone_for_group) do
+    {
+      'id' => 1,
+      'title' => "v1.0",
+      'project_id' => source_project_id,
+      'description' => "Amet velit repellat ut rerum aut cum.",
+      'due_date' => "2019-11-22",
+      'created_at' => "2019-11-20T17:02:14.296Z",
+      'updated_at' => "2019-11-20T17:02:14.296Z",
+      'state' => "active",
+      'iid' => 2,
+      'start_date' => "2019-11-21",
+      'group_id' => source_group_id
+    }
+  end
+
+  before do
+    group.add_owner(user)
+
+    allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: exported_milestones))
+    end
+  end
+
+  subject { described_class.new(context) }
+
+  shared_examples 'bulk_imports milestones pipeline' do
+    let(:tested_entity) { nil }
+
+    describe '#run' do
+      it 'imports milestones into destination' do
+        expect { subject.run }.to change(Milestone, :count).by(1)
+
+        imported_milestone = tested_entity.milestones.first
+
+        expect(imported_milestone.title).to eq("v1.0")
+        expect(imported_milestone.description).to eq("Amet velit repellat ut rerum aut cum.")
+        expect(imported_milestone.due_date.to_s).to eq("2019-11-22")
+        expect(imported_milestone.created_at).to eq("2019-11-20T17:02:14.296Z")
+        expect(imported_milestone.updated_at).to eq("2019-11-20T17:02:14.296Z")
+        expect(imported_milestone.start_date.to_s).to eq("2019-11-21")
+      end
+    end
+
+    describe '#load' do
+      context 'when milestone is not persisted' do
+        it 'saves the milestone' do
+          milestone = build(:milestone, group: group)
+
+          expect(milestone).to receive(:save!)
+
+          subject.load(context, milestone)
+        end
+      end
+
+      context 'when milestone is persisted' do
+        it 'does not save milestone' do
+          milestone = create(:milestone, group: group)
+
+          expect(milestone).not_to receive(:save!)
+
+          subject.load(context, milestone)
+        end
+      end
+
+      context 'when milestone is missing' do
+        it 'returns' do
+          expect(subject.load(context, nil)).to be_nil
+        end
+      end
+    end
+  end
+
+  context 'group milestone' do
+    let(:exported_milestones) { [[exported_milestone_for_group, 0]] }
+    let(:entity) do
+      create(
+        :bulk_import_entity,
+        group: group,
+        bulk_import: bulk_import,
+        source_full_path: 'source/full/path',
+        destination_name: 'My Destination Group',
+        destination_namespace: group.full_path
+      )
+    end
+
+    it_behaves_like 'bulk_imports milestones pipeline' do
+      let(:tested_entity) { group }
+      let(:source_group_id) { 1 }
+    end
+  end
+
+  context 'project milestone' do
+    let(:project) { create(:project, group: group) }
+    let(:exported_milestones) { [[exported_milestone_for_project, 0]] }
+
+    let(:entity) do
+      create(
+        :bulk_import_entity,
+        :project_entity,
+        project: project,
+        bulk_import: bulk_import,
+        source_full_path: 'source/full/path',
+        destination_name: 'My Destination Project',
+        destination_namespace: group.full_path
+      )
+    end
+
+    it_behaves_like 'bulk_imports milestones pipeline' do
+      let(:tested_entity) { project }
+      let(:source_project_id) { 1 }
+
+      it 'imports events' do
+        subject.run
+
+        imported_event = tested_entity.milestones.first.events.first
+
+        expect(imported_event.created_at).to eq("2021-08-12T19:12:49.810Z")
+        expect(imported_event.updated_at).to eq("2021-08-12T19:12:49.810Z")
+        expect(imported_event.target_type).to eq("Milestone")
+        expect(imported_event.fingerprint).to eq("f270eb9b27d0")
+        expect(imported_event.action).to eq("created")
+      end
+    end
+  end
+end
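The [[exported_milestone_for_group, 0]] shape stubbed above mirrors how NDJSON extraction hands relations to a pipeline: each parsed line paired with its zero-based position, which becomes the relation's relative order. A minimal standalone sketch of that pairing, assuming only core Ruby and stdlib JSON (not GitLab's extractor):

    require 'json'

    ndjson = <<~NDJSON
      {"title":"v1.0","state":"active"}
      {"title":"v2.0","state":"closed"}
    NDJSON

    # Pair each parsed relation hash with its line index, as in the stubs above.
    tuples = ndjson.each_line.with_index.map { |line, index| [JSON.parse(line), index] }

    tuples.each do |attributes, relative_order|
      puts "#{relative_order}: #{attributes['title']}"
    end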
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
new file mode 100644
index 00000000000..a3cc866a406
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
+  let_it_be(:tmpdir) { Dir.mktmpdir }
+  let_it_be(:project) { create(:project) }
+  let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+  let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
+  let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+
+  subject(:pipeline) { described_class.new(context) }
+
+  before do
+    stub_uploads_object_storage(FileUploader)
+
+    FileUtils.mkdir_p(uploads_dir_path)
+    FileUtils.touch(upload_file_path)
+  end
+
+  after do
+    FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+  end
+
+  describe '#run' do
+    it 'imports uploads into destination portable and removes tmpdir' do
+      allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+      allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+
+      pipeline.run
+
+      expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
+
+      expect(Dir.exist?(tmpdir)).to eq(false)
+    end
+  end
+
+  describe '#extract' do
+    it 'downloads & extracts upload paths' do
+      allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+      expect(pipeline).to receive(:untar_zxf)
+      file_download_service = instance_double("BulkImports::FileDownloadService")
+
+      expect(BulkImports::FileDownloadService)
+        .to receive(:new)
+        .with(
+          configuration: context.configuration,
+          relative_url: "/projects/test/export_relations/download?relation=uploads",
+          dir: tmpdir,
+          filename: 'uploads.tar.gz')
+        .and_return(file_download_service)
+
+      expect(file_download_service).to receive(:execute)
+
+      extracted_data = pipeline.extract(context)
+
+      expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
+    end
+  end
+
+  describe '#load' do
+    it 'creates a file upload' do
+      expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1)
+    end
+
+    context 'when dynamic path is nil' do
+      it 'returns' do
+        expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count }
+      end
+    end
+
+    context 'when path is a directory' do
+      it 'returns' do
+        expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count }
+      end
+    end
+  end
+end
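The '#extract' example above stubs both the download service and the untar step. A simplified sketch of the flow those stubs imply; it only runs inside the GitLab codebase, the service interface (configuration:, relative_url:, dir:, filename:, #execute) is taken from the expectations above, and the shell-out to tar is a stand-in for the pipeline's real untar_zxf helper:

    require 'tmpdir'

    def extract_upload_paths(context)
      tmpdir = Dir.mktmpdir('bulk_imports')

      # Download the uploads archive into the directory it will be unpacked in.
      BulkImports::FileDownloadService.new(
        configuration: context.configuration,
        relative_url: '/projects/test/export_relations/download?relation=uploads',
        dir: tmpdir,
        filename: 'uploads.tar.gz'
      ).execute

      # Stand-in for untar_zxf. Every extracted path, directories included,
      # is later handed to #load, which is why the '#load' examples above
      # check that directories and unknown paths are skipped.
      system('tar', '-zxf', File.join(tmpdir, 'uploads.tar.gz'), '-C', tmpdir)

      Dir.glob(File.join(tmpdir, '**', '*'))
    end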
diff --git a/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb
new file mode 100644
index 00000000000..0eefb7390dc
--- /dev/null
+++ b/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Pipelines::WikiPipeline do
+  describe '#run' do
+    let_it_be(:user) { create(:user) }
+    let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+    let_it_be(:parent) { create(:project) }
+
+    let_it_be(:entity) do
+      create(
+        :bulk_import_entity,
+        :project_entity,
+        bulk_import: bulk_import,
+        source_full_path: 'source/full/path',
+        destination_name: 'My Destination Wiki',
+        destination_namespace: parent.full_path,
+        project: parent
+      )
+    end
+
+    it_behaves_like 'wiki pipeline imports a wiki for an entity'
+  end
+end
diff --git a/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
deleted file mode 100644
index 7a0f964c5f3..00000000000
--- a/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Graphql::GetMilestonesQuery do
-  it 'has a valid query' do
-    tracker = create(:bulk_import_tracker)
-    context = BulkImports::Pipeline::Context.new(tracker)
-
-    query = GraphQL::Query.new(
-      GitlabSchema,
-      described_class.to_s,
-      variables: described_class.variables(context)
-    )
-    result = GitlabSchema.static_validator.validate(query)
-
-    expect(result[:errors]).to be_empty
-  end
-
-  describe '#data_path' do
-    it 'returns data path' do
-      expected = %w[data group milestones nodes]
-
-      expect(described_class.data_path).to eq(expected)
-    end
-  end
-
-  describe '#page_info_path' do
-    it 'returns pagination information path' do
-      expected = %w[data group milestones page_info]
-
-      expect(described_class.page_info_path).to eq(expected)
-    end
-  end
-end
diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
index de0b56045b3..69363bf0866 100644
--- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
@@ -11,20 +11,66 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
   let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
 
   let(:service_double) { instance_double(::Groups::CreateService) }
-  let(:data) { { foo: :bar } }
+  let(:data) { { 'path' => 'test' } }
 
   subject { described_class.new }
 
+  context 'when path is missing' do
+    it 'raises an error' do
+      expect { subject.load(context, {}) }.to raise_error(described_class::GroupCreationError, 'Path is missing')
+    end
+  end
+
+  context 'when destination namespace is not a group' do
+    it 'raises an error' do
+      entity.update!(destination_namespace: user.namespace.path)
+
+      expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'Destination is not a group')
+    end
+  end
+
+  context 'when group exists' do
+    it 'raises an error' do
+      group1 = create(:group)
+      group2 = create(:group, parent: group1)
+      entity.update!(destination_namespace: group1.full_path)
+      data = { 'path' => group2.path }
+
+      expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'Group exists')
+    end
+  end
+
+  context 'when there are other group errors' do
+    it 'raises an error with those errors' do
+      group = ::Group.new
+      group.validate
+      expected_errors = group.errors.full_messages.to_sentence
+
+      expect(::Groups::CreateService)
+        .to receive(:new)
+        .with(context.current_user, data)
+        .and_return(service_double)
+
+      expect(service_double).to receive(:execute).and_return(group)
+      expect(entity).not_to receive(:update!)
+
+      expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, expected_errors)
+    end
+  end
+
   context 'when user can create group' do
     shared_examples 'calls Group Create Service to create a new group' do
       it 'calls Group Create Service to create a new group' do
+        group_double = instance_double(::Group)
+
         expect(::Groups::CreateService)
           .to receive(:new)
           .with(context.current_user, data)
           .and_return(service_double)
 
-        expect(service_double).to receive(:execute)
-        expect(entity).to receive(:update!)
+        expect(service_double).to receive(:execute).and_return(group_double)
+        expect(group_double).to receive(:errors).and_return([])
+        expect(entity).to receive(:update!).with(group: group_double)
 
         subject.load(context, data)
       end
@@ -40,7 +86,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
 
     context 'when there is parent group' do
       let(:parent) { create(:group) }
-      let(:data) { { 'parent_id' => parent.id } }
+      let(:data) { { 'parent_id' => parent.id, 'path' => 'test' } }
 
       before do
         allow(Ability).to receive(:allowed?).with(user, :create_subgroup, parent).and_return(true)
@@ -55,7 +101,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
       it 'does not create new group' do
         expect(::Groups::CreateService).not_to receive(:new)
 
-        subject.load(context, data)
+        expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'User not allowed to create group')
       end
     end
 
@@ -69,7 +115,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
 
     context 'when there is parent group' do
       let(:parent) { create(:group) }
-      let(:data) { { 'parent_id' => parent.id } }
+      let(:data) { { 'parent_id' => parent.id, 'path' => 'test' } }
 
       before do
        allow(Ability).to receive(:allowed?).with(user, :create_subgroup, parent).and_return(false)
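Taken together, the new contexts pin down a guard-clause flow in the loader: each precondition raises GroupCreationError with the exact message the examples assert, and create-service errors are re-raised as one sentence. An illustrative, self-contained reconstruction inferred from those assertions alone — the keyword arguments and hash stand-ins below are not the loader's real internals:

    class GroupCreationError < StandardError; end

    def load_group(data, destination_is_group:, existing_paths:, allowed:)
      raise GroupCreationError, 'Path is missing' if data['path'].to_s.empty?
      raise GroupCreationError, 'Destination is not a group' unless destination_is_group
      raise GroupCreationError, 'Group exists' if existing_paths.include?(data['path'])
      raise GroupCreationError, 'User not allowed to create group' unless allowed

      { 'path' => data['path'] } # stand-in for the created group
    end

    group = load_group({ 'path' => 'test' },
                       destination_is_group: true,
                       existing_paths: [],
                       allowed: true)
    puts group.inspect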
diff --git a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
deleted file mode 100644
index a8354e62459..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
-  let_it_be(:user) { create(:user) }
-  let_it_be(:group) { create(:group) }
-  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-  let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/milestones.ndjson.gz' }
-  let_it_be(:entity) do
-    create(
-      :bulk_import_entity,
-      group: group,
-      bulk_import: bulk_import,
-      source_full_path: 'source/full/path',
-      destination_name: 'My Destination Group',
-      destination_namespace: group.full_path
-    )
-  end
-
-  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
-  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
-  let(:tmpdir) { Dir.mktmpdir }
-
-  before do
-    FileUtils.copy_file(filepath, File.join(tmpdir, 'milestones.ndjson.gz'))
-    group.add_owner(user)
-  end
-
-  subject { described_class.new(context) }
-
-  describe '#run' do
-    it 'imports group milestones into destination group and removes tmpdir' do
-      allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
-      allow_next_instance_of(BulkImports::FileDownloadService) do |service|
-        allow(service).to receive(:execute)
-      end
-
-      expect { subject.run }.to change(Milestone, :count).by(5)
-      expect(group.milestones.pluck(:title)).to contain_exactly('v4.0', 'v3.0', 'v2.0', 'v1.0', 'v0.0')
-      expect(File.directory?(tmpdir)).to eq(false)
-    end
-  end
-
-  describe '#load' do
-    context 'when milestone is not persisted' do
-      it 'saves the milestone' do
-        milestone = build(:milestone, group: group)
-
-        expect(milestone).to receive(:save!)
-
-        subject.load(context, milestone)
-      end
-    end
-
-    context 'when milestone is persisted' do
-      it 'does not save milestone' do
-        milestone = create(:milestone, group: group)
-
-        expect(milestone).not_to receive(:save!)
-
-        subject.load(context, milestone)
-      end
-    end
-
-    context 'when milestone is missing' do
-      it 'returns' do
-        expect(subject.load(context, nil)).to be_nil
-      end
-    end
-  end
-end
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index b322b7b0edf..5719acac4d7 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe BulkImports::Groups::Stage do
       [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
       [1, BulkImports::Groups::Pipelines::MembersPipeline],
       [1, BulkImports::Common::Pipelines::LabelsPipeline],
-      [1, BulkImports::Groups::Pipelines::MilestonesPipeline],
+      [1, BulkImports::Common::Pipelines::MilestonesPipeline],
       [1, BulkImports::Groups::Pipelines::BadgesPipeline],
       [2, BulkImports::Common::Pipelines::BoardsPipeline]
     ]
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
index 7d156c2c3df..c5197fb29d9 100644
--- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -111,6 +111,7 @@ RSpec.describe BulkImports::NdjsonPipeline do
       context = double(portable: group, current_user: user, import_export_config: config, bulk_import: import_double, entity: entity_double)
       allow(subject).to receive(:import_export_config).and_return(config)
       allow(subject).to receive(:context).and_return(context)
+      relation_object = double
 
       expect(Gitlab::ImportExport::Group::RelationFactory)
         .to receive(:create)
@@ -124,6 +125,8 @@ RSpec.describe BulkImports::NdjsonPipeline do
           user: user,
           excluded_keys: nil
         )
+        .and_return(relation_object)
+      expect(relation_object).to receive(:assign_attributes).with(group: group)
 
       subject.transform(context, data)
     end
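The two added expectations describe the transform step end to end: the relation factory returns an object built from the exported hash, and the pipeline then re-parents it onto the destination portable via assign_attributes. A toy version of that hand-off, with OpenStruct standing in for the factory's ActiveRecord output and a plain setter standing in for assign_attributes:

    require 'ostruct'

    def transform(relation_hash, destination_group)
      # Stand-in for RelationFactory.create(relation_hash: ..., ...).
      relation_object = OpenStruct.new(relation_hash)

      # Stand-in for relation_object.assign_attributes(group: group).
      relation_object.group = destination_group
      relation_object
    end

    milestone = transform({ 'title' => 'v1.0' }, 'destination-group')
    puts milestone.group # => "destination-group"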
diff --git a/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
new file mode 100644
index 00000000000..8f610fcc2ae
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline do
+  let_it_be(:project) { create(:project) }
+  let_it_be(:bulk_import) { create(:bulk_import) }
+  let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+  let(:attributes) { {} }
+  let(:external_pr) { project.external_pull_requests.last }
+  let(:external_pull_request) do
+    {
+      'pull_request_iid' => 4,
+      'source_branch' => 'feature',
+      'target_branch' => 'main',
+      'source_repository' => 'repository',
+      'target_repository' => 'repository',
+      'source_sha' => 'abc',
+      'target_sha' => 'xyz',
+      'status' => 'open',
+      'created_at' => '2019-12-24T14:04:50.053Z',
+      'updated_at' => '2019-12-24T14:05:18.138Z'
+    }.merge(attributes)
+  end
+
+  subject(:pipeline) { described_class.new(context) }
+
+  describe '#run' do
+    before do
+      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+        allow(extractor).to receive(:remove_tmp_dir)
+        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[external_pull_request, 0]]))
+      end
+
+      pipeline.run
+    end
+
+    it 'imports external pull request', :aggregate_failures do
+      expect(external_pr.pull_request_iid).to eq(external_pull_request['pull_request_iid'])
+      expect(external_pr.source_branch).to eq(external_pull_request['source_branch'])
+      expect(external_pr.target_branch).to eq(external_pull_request['target_branch'])
+      expect(external_pr.status).to eq(external_pull_request['status'])
+      expect(external_pr.created_at).to eq(external_pull_request['created_at'])
+      expect(external_pr.updated_at).to eq(external_pull_request['updated_at'])
+    end
+
+    context 'when status is closed' do
+      let(:attributes) { { 'status' => 'closed' } }
+
+      it 'imports closed external pull request' do
+        expect(external_pr.status).to eq(attributes['status'])
+      end
+    end
+
+    context 'when from fork' do
+      let(:attributes) { { 'source_repository' => 'source' } }
+
+      it 'does not create external pull request' do
+        expect(external_pr).to be_nil
+      end
+    end
+  end
+end
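The main example above opts into RSpec's :aggregate_failures metadata, which evaluates every expectation in the example and reports all failures together instead of stopping at the first. The same behaviour is available per block, as in this self-contained snippet:

    RSpec.describe 'aggregated expectations' do
      it 'evaluates every expectation before failing' do
        aggregate_failures do
          expect('open').to eq('open')
          expect(4).to eq(4)
        end
      end
    end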
diff --git a/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb
new file mode 100644
index 00000000000..3f02356b41e
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb
@@ -0,0 +1,297 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::MergeRequestsPipeline do
+  let_it_be(:user) { create(:user) }
+  let_it_be(:group) { create(:group) }
+  let_it_be(:project) { create(:project, :repository, group: group) }
+  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+  let_it_be(:entity) do
+    create(
+      :bulk_import_entity,
+      :project_entity,
+      project: project,
+      bulk_import: bulk_import,
+      source_full_path: 'source/full/path',
+      destination_name: 'My Destination Project',
+      destination_namespace: group.full_path
+    )
+  end
+
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+  let(:mr) do
+    {
+      'iid' => 7,
+      'author_id' => 22,
+      'source_project_id' => 1234,
+      'target_project_id' => 1234,
+      'title' => 'Imported MR',
+      'description' => 'Description',
+      'state' => 'opened',
+      'source_branch' => 'feature',
+      'target_branch' => 'main',
+      'source_branch_sha' => 'ABCD',
+      'target_branch_sha' => 'DCBA',
+      'created_at' => '2020-06-14T15:02:47.967Z',
+      'updated_at' => '2020-06-14T15:03:47.967Z',
+      'merge_request_diff' => {
+        'state' => 'collected',
+        'base_commit_sha' => 'ae73cb07c9eeaf35924a10f713b364d32b2dd34f',
+        'head_commit_sha' => 'a97f74ddaa848b707bea65441c903ae4bf5d844d',
+        'start_commit_sha' => '9eea46b5c72ead701c22f516474b95049c9d9462',
+        'merge_request_diff_commits' => [
+          {
+            'sha' => 'COMMIT1',
+            'relative_order' => 0,
+            'message' => 'commit message',
+            'authored_date' => '2014-08-06T08:35:52.000+02:00',
+            'committed_date' => '2014-08-06T08:35:52.000+02:00',
+            'commit_author' => {
+              'name' => 'Commit Author',
+              'email' => 'gitlab@example.com'
+            },
+            'committer' => {
+              'name' => 'Committer',
+              'email' => 'committer@example.com'
+            }
+          }
+        ],
+        'merge_request_diff_files' => [
+          {
+            'relative_order' => 0,
+            'utf8_diff' => '--- a/.gitignore\n+++ b/.gitignore\n@@ -1 +1 @@ test\n',
+            'new_path' => '.gitignore',
+            'old_path' => '.gitignore',
+            'a_mode' => '100644',
+            'b_mode' => '100644',
+            'new_file' => false,
+            'renamed_file' => false,
+            'deleted_file' => false,
+            'too_large' => false
+          }
+        ]
+      }
+    }.merge(attributes)
+  end
+
+  let(:attributes) { {} }
+  let(:imported_mr) { project.merge_requests.find_by_title(mr['title']) }
+
+  subject(:pipeline) { described_class.new(context) }
+
+  describe '#run' do
+    before do
+      group.add_owner(user)
+
+      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+        allow(extractor).to receive(:remove_tmp_dir)
+        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[mr, 0]]))
+      end
+
+      allow(project.repository).to receive(:fetch_source_branch!).and_return(true)
+      allow(project.repository).to receive(:branch_exists?).and_return(false)
+      allow(project.repository).to receive(:create_branch)
+
+      pipeline.run
+    end
+
+    it 'imports a merge request' do
+      expect(project.merge_requests.count).to eq(1)
+      expect(imported_mr.title).to eq(mr['title'])
+      expect(imported_mr.description).to eq(mr['description'])
+      expect(imported_mr.state).to eq(mr['state'])
+      expect(imported_mr.iid).to eq(mr['iid'])
+      expect(imported_mr.created_at).to eq(mr['created_at'])
+      expect(imported_mr.updated_at).to eq(mr['updated_at'])
+      expect(imported_mr.author).to eq(user)
+    end
+
+    context 'merge request state' do
+      context 'when mr is closed' do
+        let(:attributes) { { 'state' => 'closed' } }
+
+        it 'imported mr as closed' do
+          expect(imported_mr.state).to eq(attributes['state'])
+        end
+      end
+
+      context 'when mr is merged' do
+        let(:attributes) { { 'state' => 'merged' } }
+
+        it 'imported mr as merged' do
+          expect(imported_mr.state).to eq(attributes['state'])
+        end
+      end
+    end
+
+    context 'source & target project' do
+      it 'has the new project as target' do
+        expect(imported_mr.target_project).to eq(project)
+      end
+
+      it 'has the new project as source' do
+        expect(imported_mr.source_project).to eq(project)
+      end
+
+      context 'when source/target projects differ' do
+        let(:attributes) { { 'source_project_id' => 4321 } }
+
+        it 'has no source' do
+          expect(imported_mr.source_project).to be_nil
+        end
+
+        context 'when diff_head_sha is present' do
+          let(:attributes) { { 'diff_head_sha' => 'HEAD', 'source_project_id' => 4321 } }
+
+          it 'has the new project as source' do
+            expect(imported_mr.source_project).to eq(project)
+          end
+        end
+      end
+    end
+
+    context 'resource label events' do
+      let(:attributes) { { 'resource_label_events' => [{ 'action' => 'add', 'user_id' => 1 }] } }
+
+      it 'restores resource label events' do
+        expect(imported_mr.resource_label_events.first.action).to eq('add')
+      end
+    end
+
+    context 'award emoji' do
+      let(:attributes) { { 'award_emoji' => [{ 'name' => 'tada', 'user_id' => 22 }] } }
+
+      it 'has award emoji' do
+        expect(imported_mr.award_emoji.first.name).to eq(attributes['award_emoji'].first['name'])
+      end
+    end
+
+    context 'notes' do
+      let(:note) { imported_mr.notes.first }
+      let(:attributes) do
+        {
+          'notes' => [
+            {
+              'note' => 'Issue note',
+              'note_html' => '<p>something else entirely</p>',
+              'cached_markdown_version' => 917504,
+              'author_id' => 22,
+              'author' => { 'name' => 'User 22' },
+              'created_at' => '2016-06-14T15:02:56.632Z',
+              'updated_at' => '2016-06-14T15:02:47.770Z',
+              'award_emoji' => [{ 'name' => 'clapper', 'user_id' => 22 }]
+            }
+          ]
+        }
+      end
+
+      it 'imports mr note' do
+        expect(note).to be_present
+        expect(note.note).to include('By User 22')
+        expect(note.note).to include(attributes['notes'].first['note'])
+        expect(note.author).to eq(user)
+      end
+
+      it 'has award emoji' do
+        emoji = note.award_emoji.first
+
+        expect(emoji.name).to eq('clapper')
+        expect(emoji.user).to eq(user)
+      end
+
+      it 'does not import note_html' do
+        expect(note.note_html).to match(attributes['notes'].first['note'])
+        expect(note.note_html).not_to match(attributes['notes'].first['note_html'])
+      end
+    end
+
+    context 'system note metadata' do
+      let(:attributes) do
+        {
+          'notes' => [
+            {
+              'note' => 'added 3 commits',
+              'system' => true,
+              'author_id' => 22,
+              'author' => { 'name' => 'User 22' },
+              'created_at' => '2016-06-14T15:02:56.632Z',
+              'updated_at' => '2016-06-14T15:02:47.770Z',
+              'system_note_metadata' => { 'action' => 'commit', 'commit_count' => 3 }
+            }
+          ]
+        }
+      end
+
+      it 'restores system note metadata' do
+        note = imported_mr.notes.first
+
+        expect(note.system).to eq(true)
+        expect(note.noteable_type).to eq('MergeRequest')
+        expect(note.system_note_metadata.action).to eq('commit')
+        expect(note.system_note_metadata.commit_count).to eq(3)
+      end
+    end
+
+    context 'diffs' do
+      it 'imports merge request diff' do
+        expect(imported_mr.merge_request_diff).to be_present
+      end
+
+      it 'has the correct data for merge request latest_merge_request_diff' do
+        expect(imported_mr.latest_merge_request_diff_id).to eq(imported_mr.merge_request_diffs.maximum(:id))
+      end
+
+      it 'imports diff files' do
+        expect(imported_mr.merge_request_diff.merge_request_diff_files.count).to eq(1)
+      end
+
+      context 'diff commits' do
+        it 'imports diff commits' do
+          expect(imported_mr.merge_request_diff.merge_request_diff_commits.count).to eq(1)
+        end
+
+        it 'assigns committer and author details to diff commits' do
+          commit = imported_mr.merge_request_diff.merge_request_diff_commits.first
+
+          expect(commit.commit_author_id).not_to be_nil
+          expect(commit.committer_id).not_to be_nil
+        end
+
+        it 'assigns the correct commit users to diff commits' do
+          commit = MergeRequestDiffCommit.find_by(sha: 'COMMIT1')
+
+          expect(commit.commit_author.name).to eq('Commit Author')
+          expect(commit.commit_author.email).to eq('gitlab@example.com')
+          expect(commit.committer.name).to eq('Committer')
+          expect(commit.committer.email).to eq('committer@example.com')
+        end
+      end
+    end
+
+    context 'labels' do
+      let(:attributes) do
+        {
+          'label_links' => [
+            { 'label' => { 'title' => 'imported label 1', 'type' => 'ProjectLabel' } },
+            { 'label' => { 'title' => 'imported label 2', 'type' => 'ProjectLabel' } }
+          ]
+        }
+      end
+
+      it 'imports labels' do
+        expect(imported_mr.labels.pluck(:title)).to contain_exactly('imported label 1', 'imported label 2')
+      end
+    end
+
+    context 'milestone' do
+      let(:attributes) { { 'milestone' => { 'title' => 'imported milestone' } } }
+
+      it 'imports milestone' do
+        expect(imported_mr.milestone.title).to eq(attributes.dig('milestone', 'title'))
+      end
+    end
+  end
+end
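Both new project pipeline specs vary a single fixture across many contexts by merging per-context overrides into a base hash (.merge(attributes) in the let block, plus a redefined let(:attributes) in each context). The same pattern reduced to plain, runnable Ruby:

    BASE_MR = { 'state' => 'opened', 'title' => 'Imported MR' }.freeze

    def build_mr(overrides = {})
      BASE_MR.merge(overrides)
    end

    puts build_mr.inspect                      # base fixture
    puts build_mr('state' => 'merged').inspect # per-context variant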
diff --git a/spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb
new file mode 100644
index 00000000000..7de2e266192
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProtectedBranchesPipeline do
+  let_it_be(:user) { create(:user) }
+  let_it_be(:project) { create(:project) }
+  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+  let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+  let_it_be(:protected_branch) do
+    {
+      'name' => 'main',
+      'created_at' => '2016-06-14T15:02:47.967Z',
+      'updated_at' => '2016-06-14T15:02:47.967Z',
+      'merge_access_levels' => [
+        {
+          'access_level' => 40,
+          'created_at' => '2016-06-15T15:02:47.967Z',
+          'updated_at' => '2016-06-15T15:02:47.967Z'
+        }
+      ],
+      'push_access_levels' => [
+        {
+          'access_level' => 30,
+          'created_at' => '2016-06-16T15:02:47.967Z',
+          'updated_at' => '2016-06-16T15:02:47.967Z'
+        }
+      ]
+    }
+  end
+
+  subject(:pipeline) { described_class.new(context) }
+
+  describe '#run' do
+    it 'imports protected branch information' do
+      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [protected_branch, 0]))
+      end
+
+      pipeline.run
+
+      imported_protected_branch = project.protected_branches.last
+      merge_access_level = imported_protected_branch.merge_access_levels.first
+      push_access_level = imported_protected_branch.push_access_levels.first
+
+      aggregate_failures do
+        expect(imported_protected_branch.name).to eq(protected_branch['name'])
+        expect(imported_protected_branch.updated_at).to eq(protected_branch['updated_at'])
+        expect(imported_protected_branch.created_at).to eq(protected_branch['created_at'])
+        expect(merge_access_level.access_level).to eq(protected_branch['merge_access_levels'].first['access_level'])
+        expect(merge_access_level.created_at).to eq(protected_branch['merge_access_levels'].first['created_at'])
+        expect(merge_access_level.updated_at).to eq(protected_branch['merge_access_levels'].first['updated_at'])
+        expect(push_access_level.access_level).to eq(protected_branch['push_access_levels'].first['access_level'])
+        expect(push_access_level.created_at).to eq(protected_branch['push_access_levels'].first['created_at'])
+        expect(push_access_level.updated_at).to eq(protected_branch['push_access_levels'].first['updated_at'])
+      end
+    end
+  end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
index af39ec7a11c..583485faf8d 100644
--- a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
@@ -3,71 +3,72 @@
 require 'spec_helper'
 
 RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do
-  describe '#run' do
-    let_it_be(:user) { create(:user) }
-    let_it_be(:parent) { create(:project) }
-    let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-    let_it_be(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
-
-    let_it_be(:entity) do
-      create(
-        :bulk_import_entity,
-        :project_entity,
-        bulk_import: bulk_import,
-        source_full_path: 'source/full/path',
-        destination_name: 'My Destination Repository',
-        destination_namespace: parent.full_path,
-        project: parent
-      )
-    end
+  let_it_be(:user) { create(:user) }
+  let_it_be(:parent) { create(:project) }
+  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+  let_it_be(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+
+  let_it_be(:entity) do
+    create(
+      :bulk_import_entity,
+      :project_entity,
+      bulk_import: bulk_import,
+      source_full_path: 'source/full/path',
+      destination_name: 'My Destination Repository',
+      destination_namespace: parent.full_path,
+      project: parent
+    )
+  end
 
-    let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
-    let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
 
-    context 'successfully imports repository' do
-      let(:project_data) do
-        {
-          'httpUrlToRepo' => 'http://test.git'
-        }
-      end
+  let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: project_data) }
 
-      subject { described_class.new(context) }
+  subject(:pipeline) { described_class.new(context) }
+
+  before do
+    allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+      allow(extractor).to receive(:extract).and_return(extracted_data)
+    end
+  end
+
+  describe '#run' do
+    context 'successfully imports repository' do
+      let(:project_data) { { 'httpUrlToRepo' => 'http://test.git' } }
 
       it 'imports new repository into destination project' do
-        allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
-          allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
-        end
+        url = project_data['httpUrlToRepo'].sub("://", "://oauth2:#{bulk_import_configuration.access_token}@")
 
-        expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
-          url = project_data['httpUrlToRepo'].sub("://", "://oauth2:#{bulk_import_configuration.access_token}@")
-          expect(repository_service).to receive(:import_repository).with(url).and_return 0
-        end
+        expect(context.portable).to receive(:ensure_repository)
+        expect(context.portable.repository).to receive(:fetch_as_mirror).with(url)
 
-        subject.run
+        pipeline.run
       end
     end
 
     context 'blocked local networks' do
-      let(:project_data) do
-        {
-          'httpUrlToRepo' => 'http://localhost/foo.git'
-        }
-      end
+      let(:project_data) { { 'httpUrlToRepo' => 'http://localhost/foo.git' } }
 
-      before do
+      it 'imports new repository into destination project' do
         allow(Gitlab.config.gitlab).to receive(:host).and_return('notlocalhost.gitlab.com')
         allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
-        allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
-          allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
-        end
-      end
 
-      subject { described_class.new(context) }
+        pipeline.run
 
-      it 'imports new repository into destination project' do
-        subject.run
-        expect(context.entity.failed?).to be_truthy
+        expect(context.entity.failed?).to eq(true)
       end
     end
   end
+
+  describe '#after_run' do
+    it 'executes housekeeping service after import' do
+      service = instance_double(Repositories::HousekeepingService)
+
+      expect(Repositories::HousekeepingService).to receive(:new).with(context.portable, :gc).and_return(service)
+      expect(service).to receive(:execute)
+
+      pipeline.after_run(context)
+    end
+  end
 end
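The URL assertion above depends on injecting OAuth credentials into the clone URL before the repository is fetched as a mirror. The substitution in isolation, as runnable Ruby — the token value here is made up for illustration:

    url = 'http://test.git'
    access_token = 'token-example'

    # Splice "oauth2:<token>@" userinfo into the URL, as the spec expects.
    authenticated_url = url.sub('://', "://oauth2:#{access_token}@")

    puts authenticated_url # => "http://oauth2:token-example@test.git"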
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index c606cf7c556..e7670085f60 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -8,9 +8,15 @@ RSpec.describe BulkImports::Projects::Stage do
       [0, BulkImports::Projects::Pipelines::ProjectPipeline],
       [1, BulkImports::Projects::Pipelines::RepositoryPipeline],
       [2, BulkImports::Common::Pipelines::LabelsPipeline],
+      [2, BulkImports::Common::Pipelines::MilestonesPipeline],
       [3, BulkImports::Projects::Pipelines::IssuesPipeline],
       [4, BulkImports::Common::Pipelines::BoardsPipeline],
-      [5, BulkImports::Common::Pipelines::EntityFinisher]
+      [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
+      [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
+      [4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
+      [5, BulkImports::Common::Pipelines::WikiPipeline],
+      [5, BulkImports::Common::Pipelines::UploadsPipeline],
+      [6, BulkImports::Common::Pipelines::EntityFinisher]
     ]
   end
 
@@ -22,7 +28,8 @@ RSpec.describe BulkImports::Projects::Stage do
   describe '#pipelines' do
     it 'list all the pipelines with their stage number, ordered by stage' do
-      expect(subject.pipelines).to eq(pipelines)
+      expect(subject.pipelines & pipelines).to contain_exactly(*pipelines)
+      expect(subject.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
     end
   end
 end
-- 
cgit v1.2.3