gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-12-20 16:37:47 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-12-20 16:37:47 +0300
commit    aee0a117a889461ce8ced6fcf73207fe017f1d99 (patch)
tree      891d9ef189227a8445d83f35c1b0fc99573f4380 /spec/lib/bulk_imports/projects
parent    8d46af3258650d305f53b819eabf7ab18d22f59e (diff)
Add latest changes from gitlab-org/gitlab@14-6-stable-ee (tag: v14.6.0-rc42)
Diffstat (limited to 'spec/lib/bulk_imports/projects')
 spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb           |  59
 spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb                 |  53
 spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb                | 177
 spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb |  41
 spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb          |  65
 spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb          | 160
 spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb             |  45
 spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb                  |  11
 spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb        |  27
 spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb                    | 120
 spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb         | 169
 spec/lib/bulk_imports/projects/stage_spec.rb                                          |  12
 12 files changed, 939 insertions(+), 0 deletions(-)
diff --git a/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
new file mode 100644
index 00000000000..b680fa5cbfc
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Graphql::GetSnippetRepositoryQuery do
+ describe 'query repository based on full_path' do
+ let_it_be(:entity) { create(:bulk_import_entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
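+ # Statically validate the query text against GitlabSchema without executing it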
+ it 'has a valid query' do
+ query = GraphQL::Query.new(
+ GitlabSchema,
+ described_class.to_s,
+ variables: described_class.variables(context)
+ )
+ result = GitlabSchema.static_validator.validate(query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ it 'returns snippet httpUrlToRepo' do
+ expect(described_class.to_s).to include('httpUrlToRepo')
+ end
+
+ it 'returns snippet createdAt' do
+ expect(described_class.to_s).to include('createdAt')
+ end
+
+ it 'returns snippet title' do
+ expect(described_class.to_s).to include('title')
+ end
+
+ describe '.variables' do
+ it 'queries project based on source_full_path and pagination' do
+ expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 }
+
+ expect(described_class.variables(context)).to eq(expected)
+ end
+ end
+
+ describe '.data_path' do
+ it 'returns the data path' do
+ expected = %w[data project snippets nodes]
+
+ expect(described_class.data_path).to eq(expected)
+ end
+ end
+
+ describe '.page_info_path' do
+ it 'returns the pagination information path' do
+ expected = %w[data project snippets page_info]
+
+ expect(described_class.page_info_path).to eq(expected)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb
new file mode 100644
index 00000000000..e2744a6a457
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:auto_devops) do
+ {
+ 'created_at' => '2016-06-13T15:02:47.967Z',
+ 'updated_at' => '2016-06-14T15:02:47.967Z',
+ 'enabled' => true,
+ 'deploy_strategy' => 'continuous'
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports auto devops options into destination project' do
+ group.add_owner(user)
+
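+ # Stub the NDJSON extractor so the pipeline consumes the in-memory auto_devops hash above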
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [auto_devops]))
+ end
+
+ pipeline.run
+
+ expect(project.auto_devops.enabled).to be_truthy
+ expect(project.auto_devops.deploy_strategy).to eq('continuous')
+ expect(project.auto_devops.created_at).to eq('2016-06-13T15:02:47.967Z')
+ expect(project.auto_devops.updated_at).to eq('2016-06-14T15:02:47.967Z')
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
new file mode 100644
index 00000000000..98a2e8b6a57
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let(:ci_pipeline_attributes) { {} }
+ let(:ci_pipeline) do
+ {
+ sha: "fakesha",
+ ref: "fakeref",
+ project: project,
+ source: "web"
+ }.merge(ci_pipeline_attributes)
+ end
+
+ let(:ci_pipeline2) do
+ {
+ sha: "fakesha2",
+ ref: "fakeref2",
+ project: project,
+ source: "web"
+ }.merge(ci_pipeline_attributes)
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ group.add_owner(user)
+
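+ # Stub extraction and the repository branch fetch so #run operates purely on the fixture hashes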
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(
+ BulkImports::Pipeline::ExtractedData.new(data: [ci_pipeline, ci_pipeline2])
+ )
+ end
+
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_source_branch!)
+ end
+
+ pipeline.run
+ end
+
+ it 'imports Ci::Pipeline into destination project' do
+ expect(project.all_pipelines.count).to eq(2)
+ expect(project.ci_pipelines.first.sha).to eq('fakesha')
+ expect(project.ci_pipelines.second.sha).to eq('fakesha2')
+ end
+
+ context 'notes' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'notes' => [
+ {
+ 'note' => 'test note',
+ 'author_id' => 22,
+ 'noteable_type' => 'Commit',
+ 'sha' => '',
+ 'author' => {
+ 'name' => 'User 22'
+ },
+ 'commit_id' => 'fakesha',
+ 'updated_at' => '2016-06-14T15:02:47.770Z',
+ 'events' => [
+ {
+ 'action' => 'created',
+ 'author_id' => 22
+ }
+ ]
+ }
+ ]
+ }
+ end
+
+ it 'imports pipeline with notes' do
+ note = project.all_pipelines.first.notes.first
+ expect(note.note).to include('test note')
+ expect(note.events.first.action).to eq('created')
+ end
+ end
+
+ context 'stages' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'stages' => [
+ {
+ 'name' => 'test stage',
+ 'statuses' => [
+ {
+ 'name' => 'first status',
+ 'status' => 'created'
+ }
+ ]
+ }
+ ]
+ }
+ end
+
+ it 'imports pipeline with stages' do
+ stage = project.all_pipelines.first.stages.first
+ expect(stage.name).to eq('test stage')
+ expect(stage.statuses.first.name).to eq('first status')
+ end
+ end
+
+ context 'external pull request' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'source' => 'external_pull_request_event',
+ 'external_pull_request' => {
+ 'source_branch' => 'test source branch',
+ 'target_branch' => 'master',
+ 'source_sha' => 'testsha',
+ 'target_sha' => 'targetsha',
+ 'source_repository' => 'test repository',
+ 'target_repository' => 'test repository',
+ 'status' => 1,
+ 'pull_request_iid' => 1
+ }
+ }
+ end
+
+ it 'imports pipeline with external pull request' do
+ pull_request = project.all_pipelines.first.external_pull_request
+ expect(pull_request.source_branch).to eq('test source branch')
+ expect(pull_request.status).to eq('open')
+ end
+ end
+
+ context 'merge request' do
+ let(:ci_pipeline_attributes) do
+ {
+ 'source' => 'merge_request_event',
+ 'merge_request' => {
+ 'description' => 'test merge request',
+ 'title' => 'test MR',
+ 'source_branch' => 'test source branch',
+ 'target_branch' => 'master',
+ 'source_sha' => 'testsha',
+ 'target_sha' => 'targetsha',
+ 'source_repository' => 'test repository',
+ 'target_repository' => 'test repository',
+ 'target_project_id' => project.id,
+ 'source_project_id' => project.id,
+ 'author_id' => user.id
+ }
+ }
+ end
+
+ it 'imports pipeline with merge request' do
+ merge_request = project.all_pipelines.first.merge_request
+ expect(merge_request.source_branch).to eq('test source branch')
+ expect(merge_request.description).to eq('test merge request')
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
new file mode 100644
index 00000000000..9dac8e45ef9
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let_it_be(:policy) do
+ {
+ 'created_at' => '2019-12-13 13:45:04 UTC',
+ 'updated_at' => '2019-12-14 13:45:04 UTC',
+ 'next_run_at' => '2019-12-15 13:45:04 UTC',
+ 'name_regex' => 'test',
+ 'name_regex_keep' => 'regex_keep',
+ 'cadence' => '3month',
+ 'older_than' => '1month',
+ 'keep_n' => 100,
+ 'enabled' => true
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports container expiration policy', :aggregate_failures do
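+ # The extractor yields [attributes, relation_index] tuples, hence the nested [[policy, 0]] fixture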
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[policy, 0]]))
+ end
+
+ pipeline.run
+
+ policy.each_pair do |key, value|
+ expect(entity.project.container_expiration_policy.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
new file mode 100644
index 00000000000..12713f008bb
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:schedule_attributes) { {} }
+ let(:schedule) do
+ {
+ 'description' => 'test pipeline schedule',
+ 'cron' => '1 1 1 1 1',
+ 'cron_timezone' => 'UTC',
+ 'ref' => 'testref',
+ 'created_at' => '2016-06-13T15:02:47.967Z',
+ 'updated_at' => '2016-06-14T15:02:47.967Z'
+ }.merge(schedule_attributes)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ group.add_owner(user)
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [schedule]))
+ end
+
+ pipeline.run
+ end
+
+ it 'imports schedule into destination project' do
+ expect(project.pipeline_schedules.count).to eq(1)
+ pipeline_schedule = project.pipeline_schedules.first
+ schedule.each do |k, v|
+ expect(pipeline_schedule.send(k)).to eq(v)
+ end
+ end
+
+ context 'when the source schedule is active' do
+ let(:schedule_attributes) { { 'active' => true } }
+
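+ # Even when the source schedule was active, the imported schedule is expected to be created inactive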
+ it 'imports the schedule as inactive' do
+ expect(project.pipeline_schedules.first.active).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
new file mode 100644
index 00000000000..11c475318bb
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:extra) { {} }
+ let(:project_attributes) do
+ {
+ 'description' => 'description',
+ 'visibility_level' => 0,
+ 'archived' => false,
+ 'merge_requests_template' => 'test',
+ 'merge_requests_rebase_enabled' => true,
+ 'approvals_before_merge' => 0,
+ 'reset_approvals_on_push' => true,
+ 'merge_requests_ff_only_enabled' => true,
+ 'issues_template' => 'test',
+ 'shared_runners_enabled' => true,
+ 'build_coverage_regex' => 'build_coverage_regex',
+ 'build_allow_git_fetch' => true,
+ 'build_timeout' => 3600,
+ 'pending_delete' => false,
+ 'public_builds' => true,
+ 'last_repository_check_failed' => nil,
+ 'only_allow_merge_if_pipeline_succeeds' => true,
+ 'has_external_issue_tracker' => false,
+ 'request_access_enabled' => true,
+ 'has_external_wiki' => false,
+ 'ci_config_path' => nil,
+ 'only_allow_merge_if_all_discussions_are_resolved' => true,
+ 'printing_merge_request_link_enabled' => true,
+ 'auto_cancel_pending_pipelines' => 'enabled',
+ 'service_desk_enabled' => false,
+ 'delete_error' => nil,
+ 'disable_overriding_approvers_per_merge_request' => true,
+ 'resolve_outdated_diff_discussions' => true,
+ 'jobs_cache_index' => nil,
+ 'external_authorization_classification_label' => nil,
+ 'pages_https_only' => false,
+ 'merge_requests_author_approval' => false,
+ 'merge_requests_disable_committers_approval' => true,
+ 'require_password_to_approve' => true,
+ 'remove_source_branch_after_merge' => true,
+ 'autoclose_referenced_issues' => true,
+ 'suggestion_commit_message' => 'Test!'
+ }.merge(extra)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
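+ # Force Dir.mktmpdir to return a known directory so the after block can clean up everything the pipeline writes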
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ before do
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
+
+ pipeline.run
+ end
+
+ it 'imports project attributes', :aggregate_failures do
+ project_attributes.each_pair do |key, value|
+ expect(project.public_send(key)).to eq(value)
+ end
+ end
+
+ context 'when project is archived' do
+ let(:extra) { { 'archived' => true } }
+
+ it 'sets project as archived' do
+ expect(project.archived).to eq(true)
+ end
+ end
+ end
+
+ describe '#extract' do
+ before do
+ file_download_service = instance_double("BulkImports::FileDownloadService")
+ file_decompression_service = instance_double("BulkImports::FileDecompressionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
+ dir: tmpdir,
+ filename: 'self.json.gz')
+ .and_return(file_download_service)
+
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(dir: tmpdir, filename: 'self.json.gz')
+ .and_return(file_decompression_service)
+
+ expect(file_download_service).to receive(:execute)
+ expect(file_decompression_service).to receive(:execute)
+ end
+
+ it 'downloads, decompresses & decodes json' do
+ allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
+ end
+
+ context 'when json parsing error occurs' do
+ it 'raises an error' do
+ allow(pipeline).to receive(:json_attributes).and_return("invalid")
+
+ expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
+ end
+ end
+ end
+
+ describe '#transform' do
+ it 'removes prohibited attributes from hash' do
+ input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
+
+ expect(Gitlab::ImportExport::AttributeCleaner).to receive(:clean).and_call_original
+
+ expect(pipeline.transform(context, input)).to eq({ 'description' => 'description' })
+ end
+ end
+
+ describe '#load' do
+ it 'assigns attributes, drops visibility and reconciles shared runner setting' do
+ expect(project).to receive(:assign_attributes).with(project_attributes)
+ expect(project).to receive(:reconcile_shared_runners_setting!)
+ expect(project).to receive(:drop_visibility_level!)
+ expect(project).to receive(:save!)
+
+ pipeline.load(context, project_attributes)
+ end
+ end
+
+ describe '#json_attributes' do
+ it 'reads raw json from file' do
+ filepath = File.join(tmpdir, 'self.json')
+
+ FileUtils.touch(filepath)
+ expect_file_read(filepath)
+
+ pipeline.json_attributes
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb
new file mode 100644
index 00000000000..1f0defdd20c
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProjectFeaturePipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let_it_be(:project_feature) do
+ {
+ "builds_access_level": 10,
+ "wiki_access_level": 10,
+ "issues_access_level": 10,
+ "merge_requests_access_level": 10,
+ "snippets_access_level": 10,
+ "repository_access_level": 10,
+ "pages_access_level": 10,
+ "forking_access_level": 10,
+ "metrics_dashboard_access_level": 10,
+ "operations_access_level": 10,
+ "analytics_access_level": 10,
+ "security_and_compliance_access_level": 10,
+ "container_registry_access_level": 10,
+ "updated_at": "2016-09-23T11:58:28.000Z",
+ "created_at": "2014-12-26T09:26:45.000Z"
+ }
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports project feature', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[project_feature, 0]]))
+ end
+
+ pipeline.run
+
+ project_feature.each_pair do |key, value|
+ expect(entity.project.project_feature.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
index 583485faf8d..38b22538e70 100644
--- a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb
@@ -47,6 +47,17 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do
end
end
+ context 'project has no repository' do
+ let(:project_data) { { 'httpUrlToRepo' => '' } }
+
+ it 'skips repository import' do
+ expect(context.portable).not_to receive(:ensure_repository)
+ expect(context.portable.repository).not_to receive(:fetch_as_mirror)
+
+ pipeline.run
+ end
+ end
+
context 'blocked local networks' do
let(:project_data) { { 'httpUrlToRepo' => 'http://localhost/foo.git' } }
diff --git a/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb
new file mode 100644
index 00000000000..2dfa036fc48
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+ let_it_be(:setting) { { 'issue_template_key' => 'test', 'project_key' => 'key' } }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports service desk setting', :aggregate_failures do
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[setting, 0]]))
+ end
+
+ pipeline.run
+
+ setting.each_pair do |key, value|
+ expect(entity.project.service_desk_setting.public_send(key)).to eq(value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
new file mode 100644
index 00000000000..dae879de998
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:snippet_attributes) { {} }
+ let(:exported_snippet) do
+ {
+ 'id' => 25,
+ 'title' => 'Snippet with 2 files',
+ 'content' => 'content',
+ 'author_id' => 22,
+ 'project_id' => 6,
+ 'created_at' => '2021-10-28T20:21:59.712Z',
+ 'updated_at' => '2021-10-28T20:31:10.408Z',
+ 'file_name' => 'galactic_empire.rb',
+ 'visibility_level' => 0,
+ 'description' => 'How to track your Galactic armies.'
+ }.merge(snippet_attributes)
+ end
+
+ subject(:pipeline) { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ group.add_owner(user)
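+ # Pair the exported snippet with its relation index to match the extractor's tuple format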
+ snippet_with_index = [exported_snippet.dup, 0]
+
+ allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [snippet_with_index]))
+ end
+
+ pipeline.run
+ end
+
+ it 'imports snippet into destination project' do
+ imported_snippet = project.snippets.last
+
+ expect(imported_snippet).to have_attributes(
+ title: exported_snippet['title'],
+ content: exported_snippet['content'],
+ author_id: user.id,
+ created_at: DateTime.parse(exported_snippet['created_at']),
+ updated_at: DateTime.parse(exported_snippet['updated_at']),
+ file_name: exported_snippet['file_name'],
+ visibility_level: exported_snippet['visibility_level'])
+ end
+
+ context 'with award_emoji' do
+ let(:snippet_attributes) { { 'award_emoji' => [expected_award] } }
+ let(:expected_award) do
+ {
+ 'id' => 580,
+ 'name' => 'rocket',
+ 'user_id' => 1,
+ 'awardable_type' => 'Snippet',
+ 'created_at' => '2021-10-28T20:30:25.802Z',
+ 'updated_at' => '2021-10-28T20:30:25.802Z'
+ }
+ end
+
+ it 'restores the award_emoji' do
+ snippet_award = project.snippets.first.award_emoji.first
+
+ expect(snippet_award).to have_attributes(
+ name: expected_award['name'],
+ user_id: user.id,
+ awardable_type: expected_award['awardable_type'],
+ created_at: DateTime.parse(expected_award['created_at']),
+ updated_at: DateTime.parse(expected_award['updated_at']))
+ end
+ end
+
+ context 'with notes', :freeze_time do
+ # To properly emulate a fixture that is expected to be read from a file, we dump a JSON
+ # object, then parse it right away. We expect some attributes, such as DateTimes, to be
+ # converted to Strings along the way.
+ let(:exported_snippet) { Gitlab::Json.parse(note.noteable.attributes.merge('notes' => notes).to_json) }
+ let(:note) { create(:note_on_project_snippet, :with_attachment) }
+ let(:notes) { [note.attributes.merge('author' => { 'name' => note.author.name })] }
+
+ it 'restores the notes' do
+ snippet_note = project.snippets.last.notes.first
+ author_name = note.author.name
+ note_updated_at = exported_snippet['notes'].first['updated_at'].split('.').first
+
+ expect(snippet_note).to have_attributes(
+ note: note.note + "\n\n *By #{author_name} on #{note_updated_at} (imported from GitLab)*",
+ noteable_type: note.noteable_type,
+ author_id: user.id,
+ updated_at: note.updated_at,
+ line_code: note.line_code,
+ commit_id: note.commit_id,
+ system: note.system,
+ st_diff: note.st_diff,
+ updated_by_id: user.id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
new file mode 100644
index 00000000000..9897e74ec7b
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:bulk_import) { create(:bulk_import, user: user) }
+ let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let!(:matched_snippet) { create(:snippet, project: project, created_at: "1981-12-13T23:59:59Z") }
+ let(:entity) do
+ create(
+ :bulk_import_entity,
+ :project_entity,
+ project: project,
+ bulk_import: bulk_import_configuration.bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: project.full_path
+ )
+ end
+
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ let(:http_url_to_repo) { 'https://example.com/foo/bar/snippets/42.git' }
+ let(:data) do
+ [
+ {
+ 'title' => matched_snippet.title,
+ 'httpUrlToRepo' => http_url_to_repo,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ let(:page_info) do
+ {
+ 'next_page' => 'eyJpZCI6IjIyMDA2OTYifQ',
+ 'has_next_page' => false
+ }
+ end
+
+ let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) }
+
+ describe 'extractor' do
+ it 'is a GraphqlExtractor with Graphql::GetSnippetRepositoryQuery' do
+ expect(described_class.get_extractor).to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: {
+ query: BulkImports::Projects::Graphql::GetSnippetRepositoryQuery
+ })
+ end
+ end
+
+ describe '#run' do
+ let(:validation_response) { double(Hash, 'error?': false) }
+
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(extracted_data)
+ end
+
+ allow_next_instance_of(Snippets::RepositoryValidationService) do |repository_validation|
+ allow(repository_validation).to receive(:execute).and_return(validation_response)
+ end
+ end
+
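+ # Shared assertion: no snippet repository may be left behind when the pipeline skips the input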
+ shared_examples 'skippable snippet' do
+ it 'does not create snippet repo' do
+ pipeline.run
+
+ expect(Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists?).to be false
+ end
+ end
+
+ context 'when a snippet is not matched' do
+ let(:data) do
+ [
+ {
+ 'title' => 'unmatched title',
+ 'httpUrlToRepo' => http_url_to_repo,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when httpUrlToRepo is empty' do
+ let(:data) do
+ [
+ {
+ 'title' => matched_snippet.title,
+ 'createdAt' => matched_snippet.created_at.to_s
+ }
+ ]
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when a snippet matches' do
+ context 'when snippet url is valid' do
+ it 'creates snippet repo' do
+ expect { pipeline.run }
+ .to change { Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists? }.to true
+ end
+
+ it 'updates snippets statistics' do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_as_mirror)
+ end
+
+ service = double(Snippets::UpdateStatisticsService)
+
+ expect(Snippets::UpdateStatisticsService).to receive(:new).with(kind_of(Snippet)).and_return(service)
+ expect(service).to receive(:execute)
+
+ pipeline.run
+ end
+
+ it 'fetches snippet repo from url' do
+ expect_next_instance_of(Repository) do |repository|
+ expect(repository)
+ .to receive(:fetch_as_mirror)
+ .with("https://oauth2:#{bulk_import_configuration.access_token}@example.com/foo/bar/snippets/42.git")
+ end
+
+ pipeline.run
+ end
+ end
+
+ context 'when url is invalid' do
+ let(:http_url_to_repo) { 'http://0.0.0.0' }
+
+ it_behaves_like 'skippable snippet'
+ end
+
+ context 'when snippet is invalid' do
+ let(:validation_response) { double(Hash, 'error?': true) }
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:fetch_as_mirror)
+ end
+ end
+
+ it 'does not leave a hanging SnippetRepository behind' do
+ pipeline.run
+
+ expect(SnippetRepository.where(snippet_id: matched_snippet.id).exists?).to be false
+ end
+
+ it 'does not call UpdateStatisticsService' do
+ expect(Snippets::UpdateStatisticsService).not_to receive(:new)
+
+ pipeline.run
+ end
+
+ it_behaves_like 'skippable snippet'
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index e7670085f60..81cbdcae9d1 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,20 +2,32 @@
require 'spec_helper'
+# Any new stages must be added to
+# `ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb` as well.
RSpec.describe BulkImports::Projects::Stage do
let(:pipelines) do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Projects::Pipelines::RepositoryPipeline],
+ [1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
[2, BulkImports::Common::Pipelines::LabelsPipeline],
[2, BulkImports::Common::Pipelines::MilestonesPipeline],
+ [2, BulkImports::Common::Pipelines::BadgesPipeline],
[3, BulkImports::Projects::Pipelines::IssuesPipeline],
+ [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
+ [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
[4, BulkImports::Common::Pipelines::BoardsPipeline],
[4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
[4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
[4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
+ [4, BulkImports::Projects::Pipelines::CiPipelinesPipeline],
+ [4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
+ [4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
+ [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
[5, BulkImports::Common::Pipelines::WikiPipeline],
[5, BulkImports::Common::Pipelines::UploadsPipeline],
+ [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
+ [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
[6, BulkImports::Common::Pipelines::EntityFinisher]
]
end