gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'lib')
-rw-r--r--  lib/api/merge_requests.rb | 2
-rw-r--r--  lib/api/projects.rb | 4
-rw-r--r--  lib/backup/repository.rb | 171
-rw-r--r--  lib/bitbucket_server/client.rb | 71
-rw-r--r--  lib/bitbucket_server/collection.rb | 23
-rw-r--r--  lib/bitbucket_server/connection.rb | 122
-rw-r--r--  lib/bitbucket_server/page.rb | 36
-rw-r--r--  lib/bitbucket_server/paginator.rb | 38
-rw-r--r--  lib/bitbucket_server/representation/activity.rb | 71
-rw-r--r--  lib/bitbucket_server/representation/base.rb | 21
-rw-r--r--  lib/bitbucket_server/representation/comment.rb | 130
-rw-r--r--  lib/bitbucket_server/representation/pull_request.rb | 76
-rw-r--r--  lib/bitbucket_server/representation/pull_request_comment.rb | 122
-rw-r--r--  lib/bitbucket_server/representation/repo.rb | 69
-rw-r--r--  lib/gitlab/auth/activity.rb | 9
-rw-r--r--  lib/gitlab/auth/blocked_user_tracker.rb | 55
-rw-r--r--  lib/gitlab/background_migration.rb | 6
-rw-r--r--  lib/gitlab/bitbucket_server_import/importer.rb | 327
-rw-r--r--  lib/gitlab/bitbucket_server_import/project_creator.rb | 36
-rw-r--r--  lib/gitlab/checks/lfs_integrity.rb | 5
-rw-r--r--  lib/gitlab/ci/build/artifacts/gzip_file_adapter.rb | 46
-rw-r--r--  lib/gitlab/ci/parsers.rb | 9
-rw-r--r--  lib/gitlab/ci/parsers/junit.rb | 69
-rw-r--r--  lib/gitlab/ci/reports/test_case.rb | 32
-rw-r--r--  lib/gitlab/ci/reports/test_reports.rb | 39
-rw-r--r--  lib/gitlab/ci/reports/test_reports_comparer.rb | 38
-rw-r--r--  lib/gitlab/ci/reports/test_suite.rb | 54
-rw-r--r--  lib/gitlab/ci/reports/test_suite_comparer.rb | 57
-rw-r--r--  lib/gitlab/cleanup/project_uploads.rb | 2
-rw-r--r--  lib/gitlab/database/migration_helpers.rb | 4
-rw-r--r--  lib/gitlab/git/repository.rb | 11
-rw-r--r--  lib/gitlab/gitaly_client/ref_service.rb | 19
-rw-r--r--  lib/gitlab/hashed_storage/migrator.rb | 2
-rw-r--r--  lib/gitlab/import_export/command_line_util.rb | 15
-rw-r--r--  lib/gitlab/import_export/file_importer.rb | 24
-rw-r--r--  lib/gitlab/import_export/importer.rb | 12
-rw-r--r--  lib/gitlab/import_export/uploads_manager.rb | 5
-rw-r--r--  lib/gitlab/import_sources.rb | 19
-rw-r--r--  lib/gitlab/kubernetes/helm.rb | 2
-rw-r--r--  lib/gitlab/regex.rb | 4
-rw-r--r--  lib/gitlab/template_helper.rb | 14
41 files changed, 1635 insertions, 236 deletions
diff --git a/lib/api/merge_requests.rb b/lib/api/merge_requests.rb
index 2621c9f8fc2..abad418771c 100644
--- a/lib/api/merge_requests.rb
+++ b/lib/api/merge_requests.rb
@@ -380,7 +380,7 @@ module API
end
get ':id/merge_requests/:merge_request_iid/closes_issues' do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
- issues = ::Kaminari.paginate_array(merge_request.closes_issues(current_user))
+ issues = ::Kaminari.paginate_array(merge_request.visible_closing_issues_for(current_user))
issues = paginate(issues)
external_issues, internal_issues = issues.partition { |issue| issue.is_a?(ExternalIssue) }
diff --git a/lib/api/projects.rb b/lib/api/projects.rb
index 7adde79d6c3..5738bf220c6 100644
--- a/lib/api/projects.rb
+++ b/lib/api/projects.rb
@@ -321,7 +321,7 @@ module API
post ':id/archive' do
authorize!(:archive_project, user_project)
- user_project.archive!
+ ::Projects::UpdateService.new(user_project, current_user, archived: true).execute
present user_project, with: Entities::Project
end
@@ -332,7 +332,7 @@ module API
post ':id/unarchive' do
authorize!(:archive_project, user_project)
- user_project.unarchive!
+ ::Projects::UpdateService.new(@project, current_user, archived: false).execute
present user_project, with: Entities::Project
end
diff --git a/lib/backup/repository.rb b/lib/backup/repository.rb
index af762db517c..906ed498026 100644
--- a/lib/backup/repository.rb
+++ b/lib/backup/repository.rb
@@ -1,10 +1,7 @@
require 'yaml'
-require_relative 'helper'
module Backup
class Repository
- include Backup::Helper
-
attr_reader :progress
def initialize(progress)
@@ -42,131 +39,36 @@ module Backup
end
def prepare_directories
- Gitlab.config.repositories.storages.each do |name, repository_storage|
- delete_all_repositories(name, repository_storage)
+ Gitlab.config.repositories.storages.each do |name, _repository_storage|
+ Gitlab::GitalyClient::StorageService.new(name).delete_all_repositories
end
end
def backup_project(project)
- gitaly_migrate(:repository_backup, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
- if is_enabled
- backup_project_gitaly(project)
- else
- backup_project_local(project)
- end
- end
-
- backup_custom_hooks(project)
- rescue => e
- progress_warn(project, e, 'Failed to backup repo')
- end
-
- def backup_project_gitaly(project)
path_to_project_bundle = path_to_bundle(project)
Gitlab::GitalyClient::RepositoryService.new(project.repository)
.create_bundle(path_to_project_bundle)
- end
-
- def backup_project_local(project)
- path_to_project_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- path_to_repo(project)
- end
-
- path_to_project_bundle = path_to_bundle(project)
-
- cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{path_to_project_repo} bundle create #{path_to_project_bundle} --all)
- output, status = Gitlab::Popen.popen(cmd)
- progress_warn(project, cmd.join(' '), output) unless status.zero?
- end
-
- def delete_all_repositories(name, repository_storage)
- gitaly_migrate(:delete_all_repositories, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
- if is_enabled
- Gitlab::GitalyClient::StorageService.new(name).delete_all_repositories
- else
- local_delete_all_repositories(name, repository_storage)
- end
- end
- end
-
- def local_delete_all_repositories(name, repository_storage)
- path = repository_storage.legacy_disk_path
- return unless File.exist?(path)
-
- bk_repos_path = File.join(Gitlab.config.backup.path, "tmp", "#{name}-repositories.old." + Time.now.to_i.to_s)
- FileUtils.mkdir_p(bk_repos_path, mode: 0700)
- files = Dir.glob(File.join(path, "*"), File::FNM_DOTMATCH) - [File.join(path, "."), File.join(path, "..")]
-
- begin
- FileUtils.mv(files, bk_repos_path)
- rescue Errno::EACCES
- access_denied_error(path)
- rescue Errno::EBUSY
- resource_busy_error(path)
- end
- end
- def local_restore_custom_hooks(project, dir)
- path_to_project_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- path_to_repo(project)
- end
- cmd = %W(tar -xf #{path_to_tars(project, dir)} -C #{path_to_project_repo} #{dir})
- output, status = Gitlab::Popen.popen(cmd)
- unless status.zero?
- progress_warn(project, cmd.join(' '), output)
- end
- end
-
- def gitaly_restore_custom_hooks(project, dir)
- custom_hooks_path = path_to_tars(project, dir)
- Gitlab::GitalyClient::RepositoryService.new(project.repository)
- .restore_custom_hooks(custom_hooks_path)
+ backup_custom_hooks(project)
+ rescue => e
+ progress_warn(project, e, 'Failed to backup repo')
end
- def local_backup_custom_hooks(project)
- in_path(path_to_tars(project)) do |dir|
- path_to_project_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- path_to_repo(project)
- end
- break unless File.exist?(File.join(path_to_project_repo, dir))
-
- FileUtils.mkdir_p(path_to_tars(project))
- cmd = %W(tar -cf #{path_to_tars(project, dir)} -c #{path_to_project_repo} #{dir})
- output, status = Gitlab::Popen.popen(cmd)
-
- unless status.zero?
- progress_warn(project, cmd.join(' '), output)
- end
- end
- end
+ def backup_custom_hooks(project)
+ FileUtils.mkdir_p(project_backup_path(project))
- def gitaly_backup_custom_hooks(project)
- FileUtils.mkdir_p(path_to_tars(project))
- custom_hooks_path = path_to_tars(project, 'custom_hooks')
+ custom_hooks_path = custom_hooks_tar(project)
Gitlab::GitalyClient::RepositoryService.new(project.repository)
.backup_custom_hooks(custom_hooks_path)
end
- def backup_custom_hooks(project)
- gitaly_migrate(:backup_custom_hooks, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
- if is_enabled
- gitaly_backup_custom_hooks(project)
- else
- local_backup_custom_hooks(project)
- end
- end
- end
-
def restore_custom_hooks(project)
- in_path(path_to_tars(project)) do |dir|
- gitaly_migrate(:restore_custom_hooks, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
- if is_enabled
- gitaly_restore_custom_hooks(project, dir)
- else
- local_restore_custom_hooks(project, dir)
- end
- end
- end
+ return unless Dir.exist?(project_backup_path(project))
+ return if Dir.glob("#{project_backup_path(project)}/custom_hooks*").none?
+
+ custom_hooks_path = custom_hooks_tar(project)
+ Gitlab::GitalyClient::RepositoryService.new(project.repository)
+ .restore_custom_hooks(custom_hooks_path)
end
def restore
@@ -181,7 +83,8 @@ module Backup
restore_repo_success = nil
if File.exist?(path_to_project_bundle)
begin
- project.repository.create_from_bundle path_to_project_bundle
+ project.repository.create_from_bundle(path_to_project_bundle)
+ restore_custom_hooks(project)
restore_repo_success = true
rescue => e
restore_repo_success = false
@@ -197,8 +100,6 @@ module Backup
progress.puts "[Failed] restoring #{project.full_path} repository".color(:red)
end
- restore_custom_hooks(project)
-
wiki = ProjectWiki.new(project)
path_to_wiki_bundle = path_to_bundle(wiki)
@@ -219,48 +120,28 @@ module Backup
protected
- def path_to_repo(project)
- project.repository.path_to_repo
- end
-
def path_to_bundle(project)
File.join(backup_repos_path, project.disk_path + '.bundle')
end
- def path_to_tars(project, dir = nil)
- path = File.join(backup_repos_path, project.disk_path)
+ def project_backup_path(project)
+ File.join(backup_repos_path, project.disk_path)
+ end
- if dir
- File.join(path, "#{dir}.tar")
- else
- path
- end
+ def custom_hooks_tar(project)
+ File.join(project_backup_path(project), "custom_hooks.tar")
end
def backup_repos_path
File.join(Gitlab.config.backup.path, 'repositories')
end
- def in_path(path)
- return unless Dir.exist?(path)
-
- dir_entries = Dir.entries(path)
-
- if dir_entries.include?('custom_hooks') || dir_entries.include?('custom_hooks.tar')
- yield('custom_hooks')
- end
- end
-
def prepare
FileUtils.rm_rf(backup_repos_path)
FileUtils.mkdir_p(Gitlab.config.backup.path)
FileUtils.mkdir(backup_repos_path, mode: 0700)
end
- def silent
- { err: '/dev/null', out: '/dev/null' }
- end
-
private
def progress_warn(project, cmd, output)
@@ -273,18 +154,8 @@ module Backup
project_or_wiki.repository.empty?
end
- def repository_storage_paths_args
- Gitlab.config.repositories.storages.values.map { |rs| rs.legacy_disk_path }
- end
-
def display_repo_path(project)
project.hashed_storage?(:repository) ? "#{project.full_path} (#{project.disk_path})" : project.full_path
end
-
- def gitaly_migrate(method, status: Gitlab::GitalyClient::MigrationStatus::OPT_IN, &block)
- Gitlab::GitalyClient.migrate(method, status: status, &block)
- rescue GRPC::NotFound, GRPC::BadStatus => e
- raise Error, e
- end
end
end
diff --git a/lib/bitbucket_server/client.rb b/lib/bitbucket_server/client.rb
new file mode 100644
index 00000000000..15e59f93141
--- /dev/null
+++ b/lib/bitbucket_server/client.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ class Client
+ attr_reader :connection
+
+ ServerError = Class.new(StandardError)
+
+ SERVER_ERRORS = [SocketError,
+ OpenSSL::SSL::SSLError,
+ Errno::ECONNRESET,
+ Errno::ECONNREFUSED,
+ Errno::EHOSTUNREACH,
+ Net::OpenTimeout,
+ Net::ReadTimeout,
+ Gitlab::HTTP::BlockedUrlError,
+ BitbucketServer::Connection::ConnectionError].freeze
+
+ def initialize(options = {})
+ @connection = Connection.new(options)
+ end
+
+ def pull_requests(project_key, repo)
+ path = "/projects/#{project_key}/repos/#{repo}/pull-requests?state=ALL"
+ get_collection(path, :pull_request)
+ end
+
+ def activities(project_key, repo, pull_request_id)
+ path = "/projects/#{project_key}/repos/#{repo}/pull-requests/#{pull_request_id}/activities"
+ get_collection(path, :activity)
+ end
+
+ def repo(project, repo_name)
+ parsed_response = connection.get("/projects/#{project}/repos/#{repo_name}")
+ BitbucketServer::Representation::Repo.new(parsed_response)
+ end
+
+ def repos
+ path = "/repos"
+ get_collection(path, :repo)
+ end
+
+ def create_branch(project_key, repo, branch_name, sha)
+ payload = {
+ name: branch_name,
+ startPoint: sha,
+ message: 'GitLab temporary branch for import'
+ }
+
+ connection.post("/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
+ end
+
+ def delete_branch(project_key, repo, branch_name, sha)
+ payload = {
+ name: Gitlab::Git::BRANCH_REF_PREFIX + branch_name,
+ dryRun: false
+ }
+
+ connection.delete(:branches, "/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
+ end
+
+ private
+
+ def get_collection(path, type)
+ paginator = BitbucketServer::Paginator.new(connection, Addressable::URI.escape(path), type)
+ BitbucketServer::Collection.new(paginator)
+ rescue *SERVER_ERRORS => e
+ raise ServerError, e
+ end
+ end
+end
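
A rough usage sketch of the new client (the host, credentials and project/repo names below are made up; real values come from the import form):

    # Hypothetical connection options
    client = BitbucketServer::Client.new(base_uri: 'https://bitbucket.example.com',
                                         user: 'root',
                                         password: 'secret-token')

    client.repo('MYPROJ', 'my-repo')   # => BitbucketServer::Representation::Repo

    # Collections are lazy enumerators backed by the paginator introduced below
    client.pull_requests('MYPROJ', 'my-repo').each do |pr|
      puts "#{pr.iid}: #{pr.title} (#{pr.state})"
    end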
diff --git a/lib/bitbucket_server/collection.rb b/lib/bitbucket_server/collection.rb
new file mode 100644
index 00000000000..b50c5dde352
--- /dev/null
+++ b/lib/bitbucket_server/collection.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ class Collection < Enumerator
+ def initialize(paginator)
+ super() do |yielder|
+ loop do
+ paginator.items.each { |item| yielder << item }
+ end
+ end
+
+ lazy
+ end
+
+ def method_missing(method, *args)
+ return super unless self.respond_to?(method)
+
+ self.__send__(method, *args) do |item| # rubocop:disable GitlabSecurity/PublicSend
+ block_given? ? yield(item) : item
+ end
+ end
+ end
+end
diff --git a/lib/bitbucket_server/connection.rb b/lib/bitbucket_server/connection.rb
new file mode 100644
index 00000000000..45a437844bd
--- /dev/null
+++ b/lib/bitbucket_server/connection.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ class Connection
+ include ActionView::Helpers::SanitizeHelper
+
+ DEFAULT_API_VERSION = '1.0'
+ SEPARATOR = '/'
+
+ attr_reader :api_version, :base_uri, :username, :token
+
+ ConnectionError = Class.new(StandardError)
+
+ def initialize(options = {})
+ @api_version = options.fetch(:api_version, DEFAULT_API_VERSION)
+ @base_uri = options[:base_uri]
+ @username = options[:user]
+ @token = options[:password]
+ end
+
+ def get(path, extra_query = {})
+ response = Gitlab::HTTP.get(build_url(path),
+ basic_auth: auth,
+ headers: accept_headers,
+ query: extra_query)
+
+ check_errors!(response)
+
+ response.parsed_response
+ end
+
+ def post(path, body)
+ response = Gitlab::HTTP.post(build_url(path),
+ basic_auth: auth,
+ headers: post_headers,
+ body: body)
+
+ check_errors!(response)
+
+ response.parsed_response
+ end
+
+ # We need to support two different APIs for deletion:
+ #
+ # /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/branches/default
+ # /rest/branch-utils/1.0/projects/{projectKey}/repos/{repositorySlug}/branches
+ def delete(resource, path, body)
+ url = delete_url(resource, path)
+
+ response = Gitlab::HTTP.delete(url,
+ basic_auth: auth,
+ headers: post_headers,
+ body: body)
+
+ check_errors!(response)
+
+ response.parsed_response
+ end
+
+ private
+
+ def check_errors!(response)
+ raise ConnectionError, "Response is not valid JSON" unless response.parsed_response.is_a?(Hash)
+
+ return if response.code >= 200 && response.code < 300
+
+ details = sanitize(response.parsed_response.dig('errors', 0, 'message'))
+ message = "Error #{response.code}"
+ message += ": #{details}" if details
+
+ raise ConnectionError, message
+ rescue JSON::ParserError
+ raise ConnectionError, "Unable to parse the server response as JSON"
+ end
+
+ def auth
+ @auth ||= { username: username, password: token }
+ end
+
+ def accept_headers
+ @accept_headers ||= { 'Accept' => 'application/json' }
+ end
+
+ def post_headers
+ @post_headers ||= accept_headers.merge({ 'Content-Type' => 'application/json' })
+ end
+
+ def build_url(path)
+ return path if path.starts_with?(root_url)
+
+ url_join_paths(root_url, path)
+ end
+
+ def root_url
+ url_join_paths(base_uri, "/rest/api/#{api_version}")
+ end
+
+ def delete_url(resource, path)
+ if resource == :branches
+ url_join_paths(base_uri, "/rest/branch-utils/#{api_version}#{path}")
+ else
+ build_url(path)
+ end
+ end
+
+ # URI.join is stupid in that slashes are important:
+ #
+ # # URI.join('http://example.com/subpath', 'hello')
+ # => http://example.com/hello
+ #
+ # We really want http://example.com/subpath/hello
+ #
+ def url_join_paths(*paths)
+ paths.map { |path| strip_slashes(path) }.join(SEPARATOR)
+ end
+
+ def strip_slashes(path)
+ path = path[1..-1] if path.starts_with?(SEPARATOR)
+ path.chomp(SEPARATOR)
+ end
+ end
+end
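
To see why the connection joins paths by hand instead of using URI.join, a small illustration reusing the example from the comment above ('subpath' and 'hello' are arbitrary):

    require 'uri'

    URI.join('http://example.com/subpath', 'hello').to_s
    # => "http://example.com/hello"    # the /subpath prefix is dropped

    # Connection#url_join_paths strips the slashes and re-joins the segments,
    # so a Bitbucket instance mounted under a context path keeps its prefix:
    #   url_join_paths('http://example.com/subpath', '/hello')
    #   # => "http://example.com/subpath/hello"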
diff --git a/lib/bitbucket_server/page.rb b/lib/bitbucket_server/page.rb
new file mode 100644
index 00000000000..5d9a3168876
--- /dev/null
+++ b/lib/bitbucket_server/page.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ class Page
+ attr_reader :attrs, :items
+
+ def initialize(raw, type)
+ @attrs = parse_attrs(raw)
+ @items = parse_values(raw, representation_class(type))
+ end
+
+ def next?
+ !attrs.fetch(:isLastPage, true)
+ end
+
+ def next
+ attrs.fetch(:nextPageStart)
+ end
+
+ private
+
+ def parse_attrs(raw)
+ raw.slice('size', 'nextPageStart', 'isLastPage').symbolize_keys
+ end
+
+ def parse_values(raw, bitbucket_rep_class)
+ return [] unless raw['values'] && raw['values'].is_a?(Array)
+
+ bitbucket_rep_class.decorate(raw['values'])
+ end
+
+ def representation_class(type)
+ BitbucketServer::Representation.const_get(type.to_s.camelize)
+ end
+ end
+end
diff --git a/lib/bitbucket_server/paginator.rb b/lib/bitbucket_server/paginator.rb
new file mode 100644
index 00000000000..c351fb2f11f
--- /dev/null
+++ b/lib/bitbucket_server/paginator.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ class Paginator
+ PAGE_LENGTH = 25
+
+ def initialize(connection, url, type)
+ @connection = connection
+ @type = type
+ @url = url
+ @page = nil
+ end
+
+ def items
+ raise StopIteration unless has_next_page?
+
+ @page = fetch_next_page
+ @page.items
+ end
+
+ private
+
+ attr_reader :connection, :page, :url, :type
+
+ def has_next_page?
+ page.nil? || page.next?
+ end
+
+ def next_offset
+ page.nil? ? 0 : page.next
+ end
+
+ def fetch_next_page
+ parsed_response = connection.get(@url, start: next_offset, limit: PAGE_LENGTH)
+ Page.new(parsed_response, type)
+ end
+ end
+end
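
A sketch of how the paginator and collection cooperate (connection options are hypothetical; the '/repos' path is the one used by Client#repos):

    connection = BitbucketServer::Connection.new(base_uri: 'https://bitbucket.example.com',
                                                 user: 'root', password: 'secret-token')

    paginator  = BitbucketServer::Paginator.new(connection, '/repos', :repo)
    collection = BitbucketServer::Collection.new(paginator)

    # Each call to Paginator#items fetches one page of 25 results; once the
    # API reports isLastPage it raises StopIteration, which ends the loop
    # inside Collection. Consumers simply enumerate:
    collection.take(50).each { |repo| puts repo.full_name }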
diff --git a/lib/bitbucket_server/representation/activity.rb b/lib/bitbucket_server/representation/activity.rb
new file mode 100644
index 00000000000..08bf30a5d1e
--- /dev/null
+++ b/lib/bitbucket_server/representation/activity.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ module Representation
+ class Activity < Representation::Base
+ def comment?
+ action == 'COMMENTED'
+ end
+
+ def inline_comment?
+ !!(comment? && comment_anchor)
+ end
+
+ def comment
+ return unless comment?
+
+ @comment ||=
+ if inline_comment?
+ PullRequestComment.new(raw)
+ else
+ Comment.new(raw)
+ end
+ end
+
+ # TODO Move this into MergeEvent
+ def merge_event?
+ action == 'MERGED'
+ end
+
+ def committer_user
+ commit.dig('committer', 'displayName')
+ end
+
+ def committer_email
+ commit.dig('committer', 'emailAddress')
+ end
+
+ def merge_timestamp
+ timestamp = commit['committerTimestamp']
+
+ self.class.convert_timestamp(timestamp)
+ end
+
+ def merge_commit
+ commit['id']
+ end
+
+ def created_at
+ self.class.convert_timestamp(created_date)
+ end
+
+ private
+
+ def commit
+ raw.fetch('commit', {})
+ end
+
+ def action
+ raw['action']
+ end
+
+ def comment_anchor
+ raw['commentAnchor']
+ end
+
+ def created_date
+ raw['createdDate']
+ end
+ end
+ end
+end
diff --git a/lib/bitbucket_server/representation/base.rb b/lib/bitbucket_server/representation/base.rb
new file mode 100644
index 00000000000..a1961bae6ef
--- /dev/null
+++ b/lib/bitbucket_server/representation/base.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ module Representation
+ class Base
+ attr_reader :raw
+
+ def initialize(raw)
+ @raw = raw
+ end
+
+ def self.decorate(entries)
+ entries.map { |entry| new(entry)}
+ end
+
+ def self.convert_timestamp(time_usec)
+ Time.at(time_usec / 1000) if time_usec.is_a?(Integer)
+ end
+ end
+ end
+end
diff --git a/lib/bitbucket_server/representation/comment.rb b/lib/bitbucket_server/representation/comment.rb
new file mode 100644
index 00000000000..99b97a3b181
--- /dev/null
+++ b/lib/bitbucket_server/representation/comment.rb
@@ -0,0 +1,130 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ module Representation
+ # A general comment with the structure:
+ # "comment": {
+ # "author": {
+ # "active": true,
+ # "displayName": "root",
+ # "emailAddress": "stanhu+bitbucket@gitlab.com",
+ # "id": 1,
+ # "links": {
+ # "self": [
+ # {
+ # "href": "http://localhost:7990/users/root"
+ # }
+ # ]
+ # },
+ # "name": "root",
+ # "slug": "root",
+ # "type": "NORMAL"
+ # }
+ # }
+ # }
+ class Comment < Representation::Base
+ attr_reader :parent_comment
+
+ CommentNode = Struct.new(:raw_comments, :parent)
+
+ def initialize(raw, parent_comment: nil)
+ super(raw)
+
+ @parent_comment = parent_comment
+ end
+
+ def id
+ raw_comment['id']
+ end
+
+ def author_username
+ author['displayName']
+ end
+
+ def author_email
+ author['emailAddress']
+ end
+
+ def note
+ raw_comment['text']
+ end
+
+ def created_at
+ self.class.convert_timestamp(created_date)
+ end
+
+ def updated_at
+ self.class.convert_timestamp(created_date)
+ end
+
+ # Bitbucket Server supports the ability to reply to any comment
+ # and create multiple threads. It represents these as a linked list
+ # of comments within comments. For example:
+ #
+ # "comments": [
+ # {
+ # "author" : ...
+ # "comments": [
+ # {
+ # "author": ...
+ #
+ # Since GitLab only supports a single thread, we flatten all these
+ # comments into a single discussion.
+ def comments
+ @comments ||= flatten_comments
+ end
+
+ private
+
+ # In order to provide context for each reply, we need to track
+ # the parent of each comment. This method works as follows:
+ #
+ # 1. Insert the root comment into the workset. The root element is the current note.
+ # 2. For each node in the workset:
+ # a. Check whether the comment has replies. If it does, insert
+ # a node for those replies into the workset.
+ # b. Parse that note into a Comment structure and add it to a flat list.
+ def flatten_comments
+ comments = raw_comment['comments']
+ workset =
+ if comments
+ [CommentNode.new(comments, self)]
+ else
+ []
+ end
+
+ all_comments = []
+
+ until workset.empty?
+ node = workset.pop
+ parent = node.parent
+
+ node.raw_comments.each do |comment|
+ new_comments = comment.delete('comments')
+ current_comment = Comment.new({ 'comment' => comment }, parent_comment: parent)
+ all_comments << current_comment
+ workset << CommentNode.new(new_comments, current_comment) if new_comments
+ end
+ end
+
+ all_comments
+ end
+
+ def raw_comment
+ raw.fetch('comment', {})
+ end
+
+ def author
+ raw_comment['author']
+ end
+
+ def created_date
+ raw_comment['createdDate']
+ end
+
+ def updated_date
+ raw_comment['updatedDate']
+ end
+ end
+ end
+end
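
For illustration, a nested reply structure flattens into a single list (a hand-built, heavily trimmed payload):

    raw = {
      'comment' => {
        'id' => 1, 'text' => 'root note',
        'comments' => [
          { 'id' => 2, 'text' => 'first reply',
            'comments' => [{ 'id' => 3, 'text' => 'nested reply' }] }
        ]
      }
    }

    root = BitbucketServer::Representation::Comment.new(raw)
    root.comments.map(&:note)
    # => ["first reply", "nested reply"]
    # Every reply keeps #parent_comment, so the importer can quote the note
    # it responded to.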
diff --git a/lib/bitbucket_server/representation/pull_request.rb b/lib/bitbucket_server/representation/pull_request.rb
new file mode 100644
index 00000000000..c3e927d8de7
--- /dev/null
+++ b/lib/bitbucket_server/representation/pull_request.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ module Representation
+ class PullRequest < Representation::Base
+ def author
+ raw.dig('author', 'user', 'name')
+ end
+
+ def author_email
+ raw.dig('author', 'user', 'emailAddress')
+ end
+
+ def description
+ raw['description']
+ end
+
+ def iid
+ raw['id']
+ end
+
+ def state
+ case raw['state']
+ when 'MERGED'
+ 'merged'
+ when 'DECLINED'
+ 'closed'
+ else
+ 'opened'
+ end
+ end
+
+ def merged?
+ state == 'merged'
+ end
+
+ def created_at
+ self.class.convert_timestamp(created_date)
+ end
+
+ def updated_at
+ self.class.convert_timestamp(updated_date)
+ end
+
+ def title
+ raw['title']
+ end
+
+ def source_branch_name
+ raw.dig('fromRef', 'id')
+ end
+
+ def source_branch_sha
+ raw.dig('fromRef', 'latestCommit')
+ end
+
+ def target_branch_name
+ raw.dig('toRef', 'id')
+ end
+
+ def target_branch_sha
+ raw.dig('toRef', 'latestCommit')
+ end
+
+ private
+
+ def created_date
+ raw['createdDate']
+ end
+
+ def updated_date
+ raw['updatedDate']
+ end
+ end
+ end
+end
diff --git a/lib/bitbucket_server/representation/pull_request_comment.rb b/lib/bitbucket_server/representation/pull_request_comment.rb
new file mode 100644
index 00000000000..a2b3873a397
--- /dev/null
+++ b/lib/bitbucket_server/representation/pull_request_comment.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ module Representation
+ # An inline comment with the following structure that identifies
+ # the part of the diff:
+ #
+ # "commentAnchor": {
+ # "diffType": "EFFECTIVE",
+ # "fileType": "TO",
+ # "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
+ # "line": 1,
+ # "lineType": "ADDED",
+ # "orphaned": false,
+ # "path": "CHANGELOG.md",
+ # "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
+ # }
+ #
+ # More details in https://docs.atlassian.com/bitbucket-server/rest/5.12.0/bitbucket-rest.html.
+ class PullRequestComment < Comment
+ def from_sha
+ comment_anchor['fromHash']
+ end
+
+ def to_sha
+ comment_anchor['toHash']
+ end
+
+ def to?
+ file_type == 'TO'
+ end
+
+ def from?
+ file_type == 'FROM'
+ end
+
+ def added?
+ line_type == 'ADDED'
+ end
+
+ def removed?
+ line_type == 'REMOVED'
+ end
+
+ # There are three line comment types: added, removed, or context.
+ #
+ # 1. An added type means a new line was inserted, so there is no old position.
+ # 2. A removed type means a line was removed, so there is no new position.
+ # 3. A context type means the line was unmodified, so there is both an
+ # old and a new position.
+ def new_pos
+ return if removed?
+ return unless line_position
+
+ line_position[1]
+ end
+
+ def old_pos
+ return if added?
+ return unless line_position
+
+ line_position[0]
+ end
+
+ def file_path
+ comment_anchor.fetch('path')
+ end
+
+ private
+
+ def file_type
+ comment_anchor['fileType']
+ end
+
+ def line_type
+ comment_anchor['lineType']
+ end
+
+ # Each comment contains the following information about the diff:
+ #
+ # hunks: [
+ # {
+ # segments: [
+ # {
+ # "lines": [
+ # {
+ # "commentIds": [ N ],
+ # "source": X,
+ # "destination": Y
+ # }, ...
+ # ] ....
+ #
+ # To determine the line position of a comment, we search all the line
+ # entries until we find this comment ID.
+ def line_position
+ @line_position ||= diff_hunks.each do |hunk|
+ segments = hunk.fetch('segments', [])
+ segments.each do |segment|
+ lines = segment.fetch('lines', [])
+ lines.each do |line|
+ if line['commentIds']&.include?(id)
+ return [line['source'], line['destination']]
+ end
+ end
+ end
+ end
+ end
+
+ def comment_anchor
+ raw.fetch('commentAnchor', {})
+ end
+
+ def diff
+ raw.fetch('diff', {})
+ end
+
+ def diff_hunks
+ diff.fetch('hunks', [])
+ end
+ end
+ end
+end
diff --git a/lib/bitbucket_server/representation/repo.rb b/lib/bitbucket_server/representation/repo.rb
new file mode 100644
index 00000000000..6c494b79166
--- /dev/null
+++ b/lib/bitbucket_server/representation/repo.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+module BitbucketServer
+ module Representation
+ class Repo < Representation::Base
+ def initialize(raw)
+ super(raw)
+ end
+
+ def project_key
+ raw.dig('project', 'key')
+ end
+
+ def project_name
+ raw.dig('project', 'name')
+ end
+
+ def slug
+ raw['slug']
+ end
+
+ def browse_url
+ # The JSON response contains an array of 1 element. Not sure if there
+ # are cases where multiple links would be provided.
+ raw.dig('links', 'self').first.fetch('href')
+ end
+
+ def clone_url
+ raw['links']['clone'].find { |link| link['name'].starts_with?('http') }.fetch('href')
+ end
+
+ def description
+ project['description']
+ end
+
+ def full_name
+ "#{project_name}/#{name}"
+ end
+
+ def issues_enabled?
+ true
+ end
+
+ def name
+ raw['name']
+ end
+
+ def valid?
+ raw['scmId'] == 'git'
+ end
+
+ def visibility_level
+ if project['public']
+ Gitlab::VisibilityLevel::PUBLIC
+ else
+ Gitlab::VisibilityLevel::PRIVATE
+ end
+ end
+
+ def project
+ raw['project']
+ end
+
+ def to_s
+ full_name
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/activity.rb b/lib/gitlab/auth/activity.rb
index 9f84c578d4f..761f0819c60 100644
--- a/lib/gitlab/auth/activity.rb
+++ b/lib/gitlab/auth/activity.rb
@@ -18,8 +18,7 @@ module Gitlab
user_blocked: 'Counter of sign in attempts when user is blocked'
}.freeze
- def initialize(user, opts)
- @user = user
+ def initialize(opts)
@opts = opts
end
@@ -32,8 +31,6 @@ module Gitlab
when :invalid
self.class.user_password_invalid_counter_increment!
end
-
- self.class.user_blocked_counter_increment! if @user&.blocked?
end
def user_authenticated!
@@ -51,6 +48,10 @@ module Gitlab
end
end
+ def user_blocked!
+ self.class.user_blocked_counter_increment!
+ end
+
def user_session_destroyed!
self.class.user_session_destroyed_counter_increment!
end
diff --git a/lib/gitlab/auth/blocked_user_tracker.rb b/lib/gitlab/auth/blocked_user_tracker.rb
index b6d2adc834b..50712d7eac2 100644
--- a/lib/gitlab/auth/blocked_user_tracker.rb
+++ b/lib/gitlab/auth/blocked_user_tracker.rb
@@ -2,58 +2,21 @@
module Gitlab
module Auth
class BlockedUserTracker
- include Gitlab::Utils::StrongMemoize
-
- ACTIVE_RECORD_REQUEST_PARAMS = 'action_dispatch.request.request_parameters'
-
- def initialize(env)
- @env = env
- end
-
- def user_blocked?
- user&.blocked?
+ def initialize(user, auth)
+ @user = user
+ @auth = auth
end
- def user
- return unless has_user_blocked_message?
+ def log_activity!
+ return unless @user.blocked?
- strong_memoize(:user) do
- # Check for either LDAP or regular GitLab account logins
- login = @env.dig(ACTIVE_RECORD_REQUEST_PARAMS, 'username') ||
- @env.dig(ACTIVE_RECORD_REQUEST_PARAMS, 'user', 'login')
+ Gitlab::AppLogger.info <<~INFO
+ "Failed login for blocked user: user=#{@user.username} ip=#{@auth.request.ip}")
+ INFO
- User.by_login(login) if login.present?
- end
+ SystemHooksService.new.execute_hooks_for(@user, :failed_login)
rescue TypeError
end
-
- def log_blocked_user_activity!
- return unless user_blocked?
-
- Gitlab::AppLogger.info("Failed login for blocked user: user=#{user.username} ip=#{@env['REMOTE_ADDR']}")
- SystemHooksService.new.execute_hooks_for(user, :failed_login)
- true
- rescue TypeError
- end
-
- private
-
- ##
- # Devise calls User#active_for_authentication? on the User model and then
- # throws an exception to Warden with User#inactive_message:
- # https://github.com/plataformatec/devise/blob/v4.2.1/lib/devise/hooks/activatable.rb#L8
- #
- # Since Warden doesn't pass the user record to the failure handler, we
- # need to do a database lookup with the username. We can limit the
- # lookups to happen when the user was blocked by checking the inactive
- # message passed along by Warden.
- #
- def has_user_blocked_message?
- strong_memoize(:user_blocked_message) do
- message = @env.dig('warden.options', :message)
- message == User::BLOCKED_MESSAGE
- end
- end
end
end
end
diff --git a/lib/gitlab/background_migration.rb b/lib/gitlab/background_migration.rb
index d3f66877672..36c85dec544 100644
--- a/lib/gitlab/background_migration.rb
+++ b/lib/gitlab/background_migration.rb
@@ -46,7 +46,11 @@ module Gitlab
# arguments - The arguments to pass to the background migration's "perform"
# method.
def self.perform(class_name, arguments)
- const_get(class_name).new.perform(*arguments)
+ migration_class_for(class_name).new.perform(*arguments)
+ end
+
+ def self.migration_class_for(class_name)
+ const_get(class_name)
end
end
end
diff --git a/lib/gitlab/bitbucket_server_import/importer.rb b/lib/gitlab/bitbucket_server_import/importer.rb
new file mode 100644
index 00000000000..268d21a77d1
--- /dev/null
+++ b/lib/gitlab/bitbucket_server_import/importer.rb
@@ -0,0 +1,327 @@
+module Gitlab
+ module BitbucketServerImport
+ class Importer
+ include Gitlab::ShellAdapter
+ attr_reader :recover_missing_commits
+ attr_reader :project, :project_key, :repository_slug, :client, :errors, :users
+
+ REMOTE_NAME = 'bitbucket_server'.freeze
+ BATCH_SIZE = 100
+
+ TempBranch = Struct.new(:name, :sha)
+
+ def self.imports_repository?
+ true
+ end
+
+ def self.refmap
+ [:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head']
+ end
+
+ # Unlike GitHub, you can't grab the commit SHAs for pull requests that
+ # have been closed but not merged even though Bitbucket has these
+ # commits internally. We can recover these pull requests by creating a
+ # branch with the Bitbucket REST API, but by default we turn this
+ # behavior off.
+ def initialize(project, recover_missing_commits: false)
+ @project = project
+ @recover_missing_commits = recover_missing_commits
+ @project_key = project.import_data.data['project_key']
+ @repository_slug = project.import_data.data['repo_slug']
+ @client = BitbucketServer::Client.new(project.import_data.credentials)
+ @formatter = Gitlab::ImportFormatter.new
+ @errors = []
+ @users = {}
+ @temp_branches = []
+ end
+
+ def execute
+ import_repository
+ import_pull_requests
+ delete_temp_branches
+ handle_errors
+
+ true
+ end
+
+ private
+
+ def handle_errors
+ return unless errors.any?
+
+ project.update_column(:import_error, {
+ message: 'The remote data could not be fully imported.',
+ errors: errors
+ }.to_json)
+ end
+
+ def gitlab_user_id(email)
+ find_user_id(email) || project.creator_id
+ end
+
+ def find_user_id(email)
+ return nil unless email
+
+ return users[email] if users.key?(email)
+
+ user = User.find_by_any_email(email, confirmed: true)
+ users[email] = user&.id
+
+ user&.id
+ end
+
+ def repo
+ @repo ||= client.repo(project_key, repository_slug)
+ end
+
+ def sha_exists?(sha)
+ project.repository.commit(sha)
+ end
+
+ def temp_branch_name(pull_request, suffix)
+ "gitlab/import/pull-request/#{pull_request.iid}/#{suffix}"
+ end
+
+ # This method restores required SHAs that GitLab needs to create diffs
+ # into branch names as the following:
+ #
+ # gitlab/import/pull-request/N/{to,from}
+ def restore_branches(pull_requests)
+ shas_to_restore = []
+
+ pull_requests.each do |pull_request|
+ shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :from),
+ pull_request.source_branch_sha)
+ shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :to),
+ pull_request.target_branch_sha)
+ end
+
+ # Create the branches on the Bitbucket Server first
+ created_branches = restore_branch_shas(shas_to_restore)
+
+ @temp_branches += created_branches
+ # Now sync the repository so we get the new branches
+ import_repository unless created_branches.empty?
+ end
+
+ def restore_branch_shas(shas_to_restore)
+ shas_to_restore.each_with_object([]) do |temp_branch, branches_created|
+ branch_name = temp_branch.name
+ sha = temp_branch.sha
+
+ next if sha_exists?(sha)
+
+ begin
+ client.create_branch(project_key, repository_slug, branch_name, sha)
+ branches_created << temp_branch
+ rescue BitbucketServer::Connection::ConnectionError => e
+ Rails.logger.warn("BitbucketServerImporter: Unable to recreate branch for SHA #{sha}: #{e}")
+ end
+ end
+ end
+
+ def import_repository
+ project.ensure_repository
+ project.repository.fetch_as_mirror(project.import_url, refmap: self.class.refmap, remote_name: REMOTE_NAME)
+ rescue Gitlab::Shell::Error, Gitlab::Git::RepositoryMirroring::RemoteError => e
+ # Expire cache to prevent scenarios such as:
+ # 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
+ # 2. Retried import, repo is broken or not imported but +exists?+ still returns true
+ project.repository.expire_content_cache if project.repository_exists?
+
+ raise e.message
+ end
+
+ # Bitbucket Server keeps track of references for open pull requests in
+ # refs/heads/pull-requests, but closed and merged requests get moved
+ # into hidden internal refs under stash-refs/pull-requests. Unless the
+ # SHAs involved are at the tip of a branch or tag, there is no way to
+ # retrieve those commits from the server.
+ #
+ # To avoid losing history, we use the Bitbucket API to re-create the branch
+ # on the remote server. Then we have to issue a `git fetch` to download these
+ # branches.
+ def import_pull_requests
+ pull_requests = client.pull_requests(project_key, repository_slug).to_a
+
+ # Creating branches on the server and fetching the newly-created branches
+ # may take a number of network round-trips. Do this in batches so that we can
+ # avoid doing a git fetch for every new branch.
+ pull_requests.each_slice(BATCH_SIZE) do |batch|
+ restore_branches(batch) if recover_missing_commits
+
+ batch.each do |pull_request|
+ begin
+ import_bitbucket_pull_request(pull_request)
+ rescue StandardError => e
+ errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, trace: e.backtrace.join("\n"), raw_response: pull_request.raw }
+ end
+ end
+ end
+ end
+
+ def delete_temp_branches
+ @temp_branches.each do |branch|
+ begin
+ client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
+ project.repository.delete_branch(branch.name)
+ rescue BitbucketServer::Connection::ConnectionError => e
+ @errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
+ end
+ end
+ end
+
+ def import_bitbucket_pull_request(pull_request)
+ description = ''
+ description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author_email)
+ description += pull_request.description if pull_request.description
+
+ source_branch_sha = pull_request.source_branch_sha
+ target_branch_sha = pull_request.target_branch_sha
+ author_id = gitlab_user_id(pull_request.author_email)
+
+ attributes = {
+ iid: pull_request.iid,
+ title: pull_request.title,
+ description: description,
+ source_project: project,
+ source_branch: Gitlab::Git.ref_name(pull_request.source_branch_name),
+ source_branch_sha: source_branch_sha,
+ target_project: project,
+ target_branch: Gitlab::Git.ref_name(pull_request.target_branch_name),
+ target_branch_sha: target_branch_sha,
+ state: pull_request.state,
+ author_id: author_id,
+ assignee_id: nil,
+ created_at: pull_request.created_at,
+ updated_at: pull_request.updated_at
+ }
+
+ merge_request = project.merge_requests.create!(attributes)
+ import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
+ end
+
+ def import_pull_request_comments(pull_request, merge_request)
+ comments, other_activities = client.activities(project_key, repository_slug, pull_request.iid).partition(&:comment?)
+
+ merge_event = other_activities.find(&:merge_event?)
+ import_merge_event(merge_request, merge_event) if merge_event
+
+ inline_comments, pr_comments = comments.partition(&:inline_comment?)
+
+ import_inline_comments(inline_comments.map(&:comment), merge_request)
+ import_standalone_pr_comments(pr_comments.map(&:comment), merge_request)
+ end
+
+ def import_merge_event(merge_request, merge_event)
+ committer = merge_event.committer_email
+
+ user_id = gitlab_user_id(committer)
+ timestamp = merge_event.merge_timestamp
+ merge_request.update({ merge_commit_sha: merge_event.merge_commit })
+ metric = MergeRequest::Metrics.find_or_initialize_by(merge_request: merge_request)
+ metric.update(merged_by_id: user_id, merged_at: timestamp)
+ end
+
+ def import_inline_comments(inline_comments, merge_request)
+ inline_comments.each do |comment|
+ position = build_position(merge_request, comment)
+ parent = create_diff_note(merge_request, comment, position)
+
+ next unless parent&.persisted?
+
+ discussion_id = parent.discussion_id
+
+ comment.comments.each do |reply|
+ create_diff_note(merge_request, reply, position, discussion_id)
+ end
+ end
+ end
+
+ def create_diff_note(merge_request, comment, position, discussion_id = nil)
+ attributes = pull_request_comment_attributes(comment)
+ attributes.merge!(position: position, type: 'DiffNote')
+ attributes[:discussion_id] = discussion_id if discussion_id
+
+ note = merge_request.notes.build(attributes)
+
+ if note.valid?
+ note.save
+ return note
+ end
+
+ # Bitbucket Server supports the ability to comment on any line, not just the
+ # line in the diff. If we can't add the note as a DiffNote, fall back to creating
+ # a regular note.
+ create_fallback_diff_note(merge_request, comment, position)
+ rescue StandardError => e
+ errors << { type: :pull_request, id: comment.id, errors: e.message }
+ nil
+ end
+
+ def create_fallback_diff_note(merge_request, comment, position)
+ attributes = pull_request_comment_attributes(comment)
+ note = "*Comment on"
+
+ note += " #{position.old_path}:#{position.old_line} -->" if position.old_line
+ note += " #{position.new_path}:#{position.new_line}" if position.new_line
+ note += "*\n\n#{comment.note}"
+
+ attributes[:note] = note
+ merge_request.notes.create!(attributes)
+ end
+
+ def build_position(merge_request, pr_comment)
+ params = {
+ diff_refs: merge_request.diff_refs,
+ old_path: pr_comment.file_path,
+ new_path: pr_comment.file_path,
+ old_line: pr_comment.old_pos,
+ new_line: pr_comment.new_pos
+ }
+
+ Gitlab::Diff::Position.new(params)
+ end
+
+ def import_standalone_pr_comments(pr_comments, merge_request)
+ pr_comments.each do |comment|
+ begin
+ merge_request.notes.create!(pull_request_comment_attributes(comment))
+
+ comment.comments.each do |replies|
+ merge_request.notes.create!(pull_request_comment_attributes(replies))
+ end
+ rescue StandardError => e
+ errors << { type: :pull_request, iid: comment.id, errors: e.message }
+ end
+ end
+ end
+
+ def pull_request_comment_attributes(comment)
+ author = find_user_id(comment.author_email)
+ note = ''
+
+ unless author
+ author = project.creator_id
+ note = "*By #{comment.author_username} (#{comment.author_email})*\n\n"
+ end
+
+ note +=
+ # Provide some context for replying
+ if comment.parent_comment
+ "> #{comment.parent_comment.note.truncate(80)}\n\n#{comment.note}"
+ else
+ comment.note
+ end
+
+ {
+ project: project,
+ note: note,
+ author_id: author,
+ created_at: comment.created_at,
+ updated_at: comment.updated_at
+ }
+ end
+ end
+ end
+end
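
In broad strokes, the importer is driven like this (a sketch; the project is assumed to already carry Bitbucket Server import_data with project_key, repo_slug and credentials, which the project creator below sets up):

    project = Project.find_by_full_path('group/imported-repo')  # hypothetical project

    importer = Gitlab::BitbucketServerImport::Importer.new(project, recover_missing_commits: true)
    importer.execute  # mirror the repository, then import pull requests,
                      # comments and merge events in batches of 100

    importer.errors   # per-pull-request failures are collected instead of aborting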
diff --git a/lib/gitlab/bitbucket_server_import/project_creator.rb b/lib/gitlab/bitbucket_server_import/project_creator.rb
new file mode 100644
index 00000000000..35e8cd7e0ab
--- /dev/null
+++ b/lib/gitlab/bitbucket_server_import/project_creator.rb
@@ -0,0 +1,36 @@
+module Gitlab
+ module BitbucketServerImport
+ class ProjectCreator
+ attr_reader :project_key, :repo_slug, :repo, :name, :namespace, :current_user, :session_data
+
+ def initialize(project_key, repo_slug, repo, name, namespace, current_user, session_data)
+ @project_key = project_key
+ @repo_slug = repo_slug
+ @repo = repo
+ @name = name
+ @namespace = namespace
+ @current_user = current_user
+ @session_data = session_data
+ end
+
+ def execute
+ ::Projects::CreateService.new(
+ current_user,
+ name: name,
+ path: name,
+ description: repo.description,
+ namespace_id: namespace.id,
+ visibility_level: repo.visibility_level,
+ import_type: 'bitbucket_server',
+ import_source: repo.browse_url,
+ import_url: repo.clone_url,
+ import_data: {
+ credentials: session_data,
+ data: { project_key: project_key, repo_slug: repo_slug }
+ },
+ skip_wiki: true
+ ).execute
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/checks/lfs_integrity.rb b/lib/gitlab/checks/lfs_integrity.rb
index f0e5773ec3c..b816a8f00cd 100644
--- a/lib/gitlab/checks/lfs_integrity.rb
+++ b/lib/gitlab/checks/lfs_integrity.rb
@@ -1,8 +1,6 @@
module Gitlab
module Checks
class LfsIntegrity
- REV_LIST_OBJECT_LIMIT = 2_000
-
def initialize(project, newrev)
@project = project
@newrev = newrev
@@ -11,7 +9,8 @@ module Gitlab
def objects_missing?
return false unless @newrev && @project.lfs_enabled?
- new_lfs_pointers = Gitlab::Git::LfsChanges.new(@project.repository, @newrev).new_pointers(object_limit: REV_LIST_OBJECT_LIMIT)
+ new_lfs_pointers = Gitlab::Git::LfsChanges.new(@project.repository, @newrev)
+ .new_pointers(object_limit: ::Gitlab::Git::Repository::REV_LIST_COMMIT_LIMIT)
return false unless new_lfs_pointers.present?
diff --git a/lib/gitlab/ci/build/artifacts/gzip_file_adapter.rb b/lib/gitlab/ci/build/artifacts/gzip_file_adapter.rb
new file mode 100644
index 00000000000..65f65cdce08
--- /dev/null
+++ b/lib/gitlab/ci/build/artifacts/gzip_file_adapter.rb
@@ -0,0 +1,46 @@
+module Gitlab
+ module Ci
+ module Build
+ module Artifacts
+ class GzipFileAdapter
+ attr_reader :stream
+
+ InvalidStreamError = Class.new(StandardError)
+
+ def initialize(stream)
+ raise InvalidStreamError, "Stream is required" unless stream
+
+ @stream = stream
+ end
+
+ def each_blob
+ stream.seek(0)
+
+ until stream.eof?
+ gzip(stream) do |gz|
+ yield gz.read, gz.orig_name
+ unused = gz.unused&.length.to_i
+ # The stream position has already reached EOF at this point.
+ # We rewind it to the start of the unused data so we can read the
+ # next gzip stream, to support multistream archives
+ # https://golang.org/src/compress/gzip/gunzip.go#L117
+ stream.seek(-unused, IO::SEEK_CUR)
+ end
+ end
+ end
+
+ private
+
+ def gzip(stream, &block)
+ gz = Zlib::GzipReader.new(stream)
+ yield(gz)
+ rescue Zlib::Error => e
+ raise InvalidStreamError, e.message
+ ensure
+ gz&.finish
+ end
+ end
+ end
+ end
+ end
+end
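
A sketch of reading a multistream gzip artifact with the adapter (the file name is hypothetical; in CI the stream comes from the artifact file):

    File.open('artifacts.gz', 'rb') do |stream|
      adapter = Gitlab::Ci::Build::Artifacts::GzipFileAdapter.new(stream)

      # Yields once per concatenated gzip member: the decompressed content
      # plus the original file name stored in the gzip header.
      adapter.each_blob do |content, name|
        puts "#{name}: #{content.bytesize} bytes"
      end
    end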
diff --git a/lib/gitlab/ci/parsers.rb b/lib/gitlab/ci/parsers.rb
new file mode 100644
index 00000000000..a4eccc08dfc
--- /dev/null
+++ b/lib/gitlab/ci/parsers.rb
@@ -0,0 +1,9 @@
+module Gitlab
+ module Ci
+ module Parsers
+ def self.fabricate!(file_type)
+ "Gitlab::Ci::Parsers::#{file_type.classify}".constantize.new
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/parsers/junit.rb b/lib/gitlab/ci/parsers/junit.rb
new file mode 100644
index 00000000000..3c4668ec13b
--- /dev/null
+++ b/lib/gitlab/ci/parsers/junit.rb
@@ -0,0 +1,69 @@
+module Gitlab
+ module Ci
+ module Parsers
+ class Junit
+ attr_reader :data
+
+ JunitParserError = Class.new(StandardError)
+
+ def parse!(xml_data, test_suite)
+ @data = Hash.from_xml(xml_data)
+
+ each_suite do |testcases|
+ testcases.each do |testcase|
+ test_case = create_test_case(testcase)
+ test_suite.add_test_case(test_case)
+ end
+ end
+ rescue REXML::ParseException => e
+ raise JunitParserError, "XML parsing failed: #{e.message}"
+ rescue => e
+ raise JunitParserError, "JUnit parsing failed: #{e.message}"
+ end
+
+ private
+
+ def each_suite
+ testsuites.each do |testsuite|
+ yield testcases(testsuite)
+ end
+ end
+
+ def testsuites
+ if data['testsuites']
+ data['testsuites']['testsuite']
+ else
+ [data['testsuite']]
+ end
+ end
+
+ def testcases(testsuite)
+ if testsuite['testcase'].is_a?(Array)
+ testsuite['testcase']
+ else
+ [testsuite['testcase']]
+ end
+ end
+
+ def create_test_case(data)
+ if data['failure']
+ status = ::Gitlab::Ci::Reports::TestCase::STATUS_FAILED
+ system_output = data['failure']
+ else
+ status = ::Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS
+ system_output = nil
+ end
+
+ ::Gitlab::Ci::Reports::TestCase.new(
+ classname: data['classname'],
+ name: data['name'],
+ file: data['file'],
+ execution_time: data['time'],
+ status: status,
+ system_output: system_output
+ )
+ end
+ end
+ end
+ end
+end
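
Roughly, the parser turns a JUnit report into a populated test suite (minimal hand-written XML):

    xml = <<~XML
      <testsuite name="rspec">
        <testcase classname="UserSpec" name="creates a user" time="0.15"/>
        <testcase classname="UserSpec" name="rejects a bad email" time="0.05">
          <failure>expected false to be true</failure>
        </testcase>
      </testsuite>
    XML

    suite = Gitlab::Ci::Reports::TestSuite.new('rspec')
    Gitlab::Ci::Parsers.fabricate!('junit').parse!(xml, suite)

    suite.total_count   # => 2
    suite.failed_count  # => 1
    suite.total_status  # => "failed"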
diff --git a/lib/gitlab/ci/reports/test_case.rb b/lib/gitlab/ci/reports/test_case.rb
new file mode 100644
index 00000000000..b4d08ed257f
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_case.rb
@@ -0,0 +1,32 @@
+module Gitlab
+ module Ci
+ module Reports
+ class TestCase
+ STATUS_SUCCESS = 'success'.freeze
+ STATUS_FAILED = 'failed'.freeze
+ STATUS_SKIPPED = 'skipped'.freeze
+ STATUS_ERROR = 'error'.freeze
+ STATUS_TYPES = [STATUS_SUCCESS, STATUS_FAILED, STATUS_SKIPPED, STATUS_ERROR].freeze
+
+ attr_reader :name, :classname, :execution_time, :status, :file, :system_output, :stack_trace, :key
+
+ def initialize(name:, classname:, execution_time:, status:, file: nil, system_output: nil, stack_trace: nil)
+ @name = name
+ @classname = classname
+ @file = file
+ @execution_time = execution_time.to_f
+ @status = status
+ @system_output = system_output
+ @stack_trace = stack_trace
+ @key = sanitize_key_name("#{classname}_#{name}")
+ end
+
+ private
+
+ def sanitize_key_name(key)
+ key.gsub(/[^0-9A-Za-z]/, '-')
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/reports/test_reports.rb b/lib/gitlab/ci/reports/test_reports.rb
new file mode 100644
index 00000000000..c6e732e68eb
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_reports.rb
@@ -0,0 +1,39 @@
+module Gitlab
+ module Ci
+ module Reports
+ class TestReports
+ attr_reader :test_suites
+
+ def initialize
+ @test_suites = {}
+ end
+
+ def get_suite(suite_name)
+ test_suites[suite_name] ||= TestSuite.new(suite_name)
+ end
+
+ def total_time
+ test_suites.values.sum(&:total_time)
+ end
+
+ def total_count
+ test_suites.values.sum(&:total_count)
+ end
+
+ def total_status
+ if failed_count > 0 || error_count > 0
+ TestCase::STATUS_FAILED
+ else
+ TestCase::STATUS_SUCCESS
+ end
+ end
+
+ TestCase::STATUS_TYPES.each do |status_type|
+ define_method("#{status_type}_count") do
+ test_suites.values.sum { |suite| suite.public_send("#{status_type}_count") } # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/reports/test_reports_comparer.rb b/lib/gitlab/ci/reports/test_reports_comparer.rb
new file mode 100644
index 00000000000..c0943f5a51a
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_reports_comparer.rb
@@ -0,0 +1,38 @@
+module Gitlab
+ module Ci
+ module Reports
+ class TestReportsComparer
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :base_reports, :head_reports
+
+ def initialize(base_reports, head_reports)
+ @base_reports = base_reports || TestReports.new
+ @head_reports = head_reports
+ end
+
+ def suite_comparers
+ strong_memoize(:suite_comparers) do
+ head_reports.test_suites.map do |name, test_suite|
+ TestSuiteComparer.new(name, base_reports.get_suite(name), test_suite)
+ end
+ end
+ end
+
+ def total_status
+ if suite_comparers.any? { |suite| suite.total_status == TestCase::STATUS_FAILED }
+ TestCase::STATUS_FAILED
+ else
+ TestCase::STATUS_SUCCESS
+ end
+ end
+
+ %w(total_count resolved_count failed_count).each do |method|
+ define_method(method) do
+ suite_comparers.sum { |suite| suite.public_send(method) } # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/reports/test_suite.rb b/lib/gitlab/ci/reports/test_suite.rb
new file mode 100644
index 00000000000..b722d0ba735
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_suite.rb
@@ -0,0 +1,54 @@
+module Gitlab
+ module Ci
+ module Reports
+ class TestSuite
+ attr_reader :name
+ attr_reader :test_cases
+ attr_reader :total_time
+
+ def initialize(name = nil)
+ @name = name
+ @test_cases = {}
+ @total_time = 0.0
+ @duplicate_cases = []
+ end
+
+ def add_test_case(test_case)
+ @duplicate_cases << test_case if existing_key?(test_case)
+
+ @test_cases[test_case.status] ||= {}
+ @test_cases[test_case.status][test_case.key] = test_case
+ @total_time += test_case.execution_time
+ end
+
+ def total_count
+ test_cases.values.sum(&:count)
+ end
+
+ def total_status
+ if failed_count > 0 || error_count > 0
+ TestCase::STATUS_FAILED
+ else
+ TestCase::STATUS_SUCCESS
+ end
+ end
+
+ TestCase::STATUS_TYPES.each do |status_type|
+ define_method("#{status_type}") do
+ test_cases[status_type] || {}
+ end
+
+ define_method("#{status_type}_count") do
+ test_cases[status_type]&.length.to_i
+ end
+ end
+
+ private
+
+ def existing_key?(test_case)
+ @test_cases[test_case.status]&.key?(test_case.key)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/reports/test_suite_comparer.rb b/lib/gitlab/ci/reports/test_suite_comparer.rb
new file mode 100644
index 00000000000..642aa593092
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_suite_comparer.rb
@@ -0,0 +1,57 @@
+module Gitlab
+ module Ci
+ module Reports
+ class TestSuiteComparer
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :name, :base_suite, :head_suite
+
+ def initialize(name, base_suite, head_suite)
+ @name = name
+ @base_suite = base_suite || TestSuite.new
+ @head_suite = head_suite
+ end
+
+ def new_failures
+ strong_memoize(:new_failures) do
+ head_suite.failed.reject do |key, _|
+ base_suite.failed.include?(key)
+ end.values
+ end
+ end
+
+ def existing_failures
+ strong_memoize(:existing_failures) do
+ head_suite.failed.select do |key, _|
+ base_suite.failed.include?(key)
+ end.values
+ end
+ end
+
+ def resolved_failures
+ strong_memoize(:resolved_failures) do
+ head_suite.success.select do |key, _|
+ base_suite.failed.include?(key)
+ end.values
+ end
+ end
+
+ def total_count
+ head_suite.total_count
+ end
+
+ def total_status
+ head_suite.total_status
+ end
+
+ def resolved_count
+ resolved_failures.count
+ end
+
+ def failed_count
+ new_failures.count + existing_failures.count
+ end
+ end
+ end
+ end
+end
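
The comparers consume two sets of reports, one per pipeline (a sketch; in practice both sides are filled from JUnit artifacts on the target and source branches):

    base_reports = Gitlab::Ci::Reports::TestReports.new  # target branch pipeline
    head_reports = Gitlab::Ci::Reports::TestReports.new  # merge request pipeline

    comparer = Gitlab::Ci::Reports::TestReportsComparer.new(base_reports, head_reports)

    comparer.total_status  # "failed" as soon as any suite has failures
    comparer.suite_comparers.each do |suite|
      puts "#{suite.name}: #{suite.resolved_count} resolved, #{suite.failed_count} failed"
    end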
diff --git a/lib/gitlab/cleanup/project_uploads.rb b/lib/gitlab/cleanup/project_uploads.rb
index b88e00311d5..f55ab535efe 100644
--- a/lib/gitlab/cleanup/project_uploads.rb
+++ b/lib/gitlab/cleanup/project_uploads.rb
@@ -40,7 +40,7 @@ module Gitlab
# Accepts a path in the form of "#{hex_secret}/#{filename}"
def find_correct_path(upload_path)
upload = Upload.find_by(uploader: 'FileUploader', path: upload_path)
- return unless upload && upload.local?
+ return unless upload && upload.local? && upload.model
upload.absolute_path
rescue => e
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index 4fe5b4cc835..f39b3b6eb5b 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -979,8 +979,8 @@ into similar problems in the future (e.g. when new tables are created).
# To not overload the worker too much we enforce a minimum interval both
# when scheduling and performing jobs.
- if delay_interval < BackgroundMigrationWorker::MIN_INTERVAL
- delay_interval = BackgroundMigrationWorker::MIN_INTERVAL
+ if delay_interval < BackgroundMigrationWorker.minimum_interval
+ delay_interval = BackgroundMigrationWorker.minimum_interval
end
model_class.each_batch(of: batch_size) do |relation, index|
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 73151e4a4c5..de189ac6dfc 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -19,6 +19,7 @@ module Gitlab
GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
].freeze
SEARCH_CONTEXT_LINES = 3
+ REV_LIST_COMMIT_LIMIT = 2_000
# In https://gitlab.com/gitlab-org/gitaly/merge_requests/698
# We copied these two prefixes into gitaly-go, so don't change these
# or things will break! (REBASE_WORKTREE_PREFIX and SQUASH_WORKTREE_PREFIX)
@@ -380,6 +381,16 @@ module Gitlab
end
end
+ def new_blobs(newrev)
+ return [] if newrev == ::Gitlab::Git::BLANK_SHA
+
+ strong_memoize("new_blobs_#{newrev}") do
+ wrapped_gitaly_errors do
+ gitaly_ref_client.list_new_blobs(newrev, REV_LIST_COMMIT_LIMIT)
+ end
+ end
+ end
+
def count_commits(options)
options = process_count_commits_options(options.dup)
diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb
index fbe7d4ba1ae..8acc22e809e 100644
--- a/lib/gitlab/gitaly_client/ref_service.rb
+++ b/lib/gitlab/gitaly_client/ref_service.rb
@@ -82,6 +82,23 @@ module Gitlab
commits
end
+ def list_new_blobs(newrev, limit = 0)
+ request = Gitaly::ListNewBlobsRequest.new(
+ repository: @gitaly_repo,
+ commit_id: newrev,
+ limit: limit
+ )
+
+ response = GitalyClient
+ .call(@storage, :ref_service, :list_new_blobs, request, timeout: GitalyClient.medium_timeout)
+
+ response.flat_map do |msg|
+ # Returns an Array of Gitaly::NewBlobObject objects
+ # Available methods are: #size, #oid and #path
+ msg.new_blob_objects
+ end
+ end
+
def count_tag_names
tag_names.count
end
@@ -166,7 +183,7 @@ module Gitlab
except_with_prefix: except_with_prefixes.map { |r| encode_binary(r) }
)
- response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request, timeout: GitalyClient.fast_timeout)
+ response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request, timeout: GitalyClient.default_timeout)
raise Gitlab::Git::Repository::GitError, response.git_error if response.git_error.present?
end
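
Taken together with the Repository change above, listing the blobs a push introduced might look like this (the SHA and the repository handle are hypothetical):

    newrev = '1e292f8fedd741b75372e19097c76d327140c312'  # hypothetical pushed commit

    # Memoized per revision; delegates to RefService#list_new_blobs and is
    # capped at REV_LIST_COMMIT_LIMIT (2,000) objects.
    blobs = repository.new_blobs(newrev)   # repository is a Gitlab::Git::Repository

    blobs.each do |blob|
      # Gitaly::NewBlobObject responds to #size, #oid and #path
      puts "#{blob.oid} #{blob.size} #{blob.path}"
    end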
diff --git a/lib/gitlab/hashed_storage/migrator.rb b/lib/gitlab/hashed_storage/migrator.rb
index 9251ed654cd..d11fcc6a3e3 100644
--- a/lib/gitlab/hashed_storage/migrator.rb
+++ b/lib/gitlab/hashed_storage/migrator.rb
@@ -30,7 +30,7 @@ module Gitlab
end
end
- # Flag a project to me migrated
+ # Flag a project to be migrated
#
# @param [Object] project that will be migrated
def migrate(project)
diff --git a/lib/gitlab/import_export/command_line_util.rb b/lib/gitlab/import_export/command_line_util.rb
index 2f163db936b..3adc44f8044 100644
--- a/lib/gitlab/import_export/command_line_util.rb
+++ b/lib/gitlab/import_export/command_line_util.rb
@@ -18,6 +18,21 @@ module Gitlab
private
+ def download_or_copy_upload(uploader, upload_path)
+ if uploader.upload.local?
+ copy_files(uploader.path, upload_path)
+ else
+ download(uploader.url, upload_path)
+ end
+ end
+
+ def download(url, upload_path)
+ File.open(upload_path, 'w') do |file|
+ # Download (stream) file from the uploader's location
+ IO.copy_stream(URI.parse(url).open, file)
+ end
+ end
+
def tar_with_options(archive:, dir:, options:)
execute(%W(tar -#{options} #{archive} -C #{dir} .))
end
diff --git a/lib/gitlab/import_export/file_importer.rb b/lib/gitlab/import_export/file_importer.rb
index 4c411f4847e..7fd66b4e244 100644
--- a/lib/gitlab/import_export/file_importer.rb
+++ b/lib/gitlab/import_export/file_importer.rb
@@ -10,15 +10,18 @@ module Gitlab
new(*args).import
end
- def initialize(archive_file:, shared:)
+ def initialize(project:, archive_file:, shared:)
+ @project = project
@archive_file = archive_file
@shared = shared
end
def import
mkdir_p(@shared.export_path)
+ mkdir_p(@shared.archive_path)
- remove_symlinks!
+ remove_symlinks
+ copy_archive
wait_for_archived_file do
decompress_archive
@@ -27,7 +30,8 @@ module Gitlab
@shared.error(e)
false
ensure
- remove_symlinks!
+ remove_import_file
+ remove_symlinks
end
private
@@ -51,7 +55,15 @@ module Gitlab
result
end
- def remove_symlinks!
+ def copy_archive
+ return if @archive_file
+
+ @archive_file = File.join(@shared.archive_path, Gitlab::ImportExport.export_filename(project: @project))
+
+ download_or_copy_upload(@project.import_export_upload.import_file, @archive_file)
+ end
+
+ def remove_symlinks
extracted_files.each do |path|
FileUtils.rm(path) if File.lstat(path).symlink?
end
@@ -59,6 +71,10 @@ module Gitlab
true
end
+ def remove_import_file
+ FileUtils.rm_rf(@archive_file)
+ end
+
def extracted_files
Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| IGNORED_FILENAMES.include?(File.basename(f)) }
end
diff --git a/lib/gitlab/import_export/importer.rb b/lib/gitlab/import_export/importer.rb
index 63cab07324a..4e179f63d8c 100644
--- a/lib/gitlab/import_export/importer.rb
+++ b/lib/gitlab/import_export/importer.rb
@@ -35,7 +35,8 @@ module Gitlab
end
def import_file
- Gitlab::ImportExport::FileImporter.import(archive_file: @archive_file,
+ Gitlab::ImportExport::FileImporter.import(project: @project,
+ archive_file: @archive_file,
shared: @shared)
end
@@ -91,7 +92,14 @@ module Gitlab
end
def remove_import_file
- FileUtils.rm_rf(@archive_file)
+ return unless Gitlab::ImportExport.object_storage?
+
+ upload = @project.import_export_upload
+
+ return unless upload&.import_file&.file
+
+ upload.remove_import_file!
+ upload.save!
end
def overwrite_project
diff --git a/lib/gitlab/import_export/uploads_manager.rb b/lib/gitlab/import_export/uploads_manager.rb
index 1110149712d..07875ebb56a 100644
--- a/lib/gitlab/import_export/uploads_manager.rb
+++ b/lib/gitlab/import_export/uploads_manager.rb
@@ -91,10 +91,7 @@ module Gitlab
mkdir_p(File.join(uploads_export_path, secret))
- File.open(upload_path, 'w') do |file|
- # Download (stream) file from the uploader's location
- IO.copy_stream(URI.parse(upload.file.url).open, file)
- end
+ download_or_copy_upload(upload, upload_path)
end
end
end
diff --git a/lib/gitlab/import_sources.rb b/lib/gitlab/import_sources.rb
index 45816bee176..f7f5c5787f6 100644
--- a/lib/gitlab/import_sources.rb
+++ b/lib/gitlab/import_sources.rb
@@ -9,15 +9,16 @@ module Gitlab
# We exclude `bare_repository` here as it has no import class associated
ImportTable = [
- ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
- ImportSource.new('bitbucket', 'Bitbucket', Gitlab::BitbucketImport::Importer),
- ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
- ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
- ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
- ImportSource.new('git', 'Repo by URL', nil),
- ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
- ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer),
- ImportSource.new('manifest', 'Manifest file', nil)
+ ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
+ ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
+ ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
+ ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
+ ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
+ ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
+ ImportSource.new('git', 'Repo by URL', nil),
+ ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
+ ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer),
+ ImportSource.new('manifest', 'Manifest file', nil)
].freeze
class << self
diff --git a/lib/gitlab/kubernetes/helm.rb b/lib/gitlab/kubernetes/helm.rb
index 0f0588b8b23..530ccf88053 100644
--- a/lib/gitlab/kubernetes/helm.rb
+++ b/lib/gitlab/kubernetes/helm.rb
@@ -1,7 +1,7 @@
module Gitlab
module Kubernetes
module Helm
- HELM_VERSION = '2.7.0'.freeze
+ HELM_VERSION = '2.7.2'.freeze
NAMESPACE = 'gitlab-managed-apps'.freeze
end
end
diff --git a/lib/gitlab/regex.rb b/lib/gitlab/regex.rb
index e1a958c508a..0f26fcfe8cb 100644
--- a/lib/gitlab/regex.rb
+++ b/lib/gitlab/regex.rb
@@ -99,5 +99,9 @@ module Gitlab
)
}mx
end
+
+ def jira_transition_id_regex
+ @jira_transition_id_regex ||= /\d+/
+ end
end
end
diff --git a/lib/gitlab/template_helper.rb b/lib/gitlab/template_helper.rb
index 3b8e45e0688..f24a01e6cf5 100644
--- a/lib/gitlab/template_helper.rb
+++ b/lib/gitlab/template_helper.rb
@@ -2,11 +2,17 @@ module Gitlab
module TemplateHelper
include Gitlab::Utils::StrongMemoize
- def prepare_template_environment(file_path)
- return unless file_path.present?
+ def prepare_template_environment(file)
+ return unless file
- FileUtils.mkdir_p(File.dirname(import_upload_path))
- FileUtils.copy_entry(file_path, import_upload_path)
+ if Gitlab::ImportExport.object_storage?
+ params[:import_export_upload] = ImportExportUpload.new(import_file: file)
+ else
+ FileUtils.mkdir_p(File.dirname(import_upload_path))
+ FileUtils.copy_entry(file.path, import_upload_path)
+
+ params[:import_source] = import_upload_path
+ end
end
def import_upload_path