gitlab.com/gitlab-org/gitlab-foss.git

commit da246582db2577107ffdfbcf5741daccc8e029c0 (patch)
tree   8b30561289ab8a11576bf5e134d70f3418913f73 /lib
parent 809a50fcbf5ecfbf5ec02671f1ca2710a96f58d3 (diff)
parent 9921830267ccef2321e9a57383b28daafd34fa78 (diff)
Author:    Grzegorz Bizon <grzegorz@gitlab.com>  2018-06-05 16:51:31 +0300
Committer: Grzegorz Bizon <grzegorz@gitlab.com>  2018-06-05 16:51:31 +0300

Merge branch 'master' into 'backstage/gb/use-persisted-stages-to-improve-pipelines-table'

# Conflicts:
#   db/schema.rb
Diffstat (limited to 'lib')
 lib/api/issues.rb                                          |   2
 lib/api/jobs.rb                                            |   1
 lib/api/runner.rb                                          |   2
 lib/gitlab/auth.rb                                         |   8
 lib/gitlab/background_migration/archive_legacy_traces.rb   |  24
 lib/gitlab/git/repository.rb                               |   5
 lib/gitlab/github_import/importer/pull_request_importer.rb |  56
 lib/gitlab/utils/override.rb                               |  16
 lib/object_storage/direct_upload.rb                        | 166
 lib/tasks/gitlab/traces.rake                               |   4
 10 files changed, 261 insertions(+), 23 deletions(-)
diff --git a/lib/api/issues.rb b/lib/api/issues.rb
index b64f465ce56..25185d6edc8 100644
--- a/lib/api/issues.rb
+++ b/lib/api/issues.rb
@@ -16,7 +16,7 @@ module API
args[:scope] = args[:scope].underscore if args[:scope]
issues = IssuesFinder.new(current_user, args).execute
- .preload(:assignees, :labels, :notes, :timelogs, :project)
+ .preload(:assignees, :labels, :notes, :timelogs, :project, :author)
issues.reorder(args[:order_by] => args[:sort])
end
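
The `:author` preload above exists to avoid an N+1 query: the issues API serializes each issue's author, so without the preload every issue would trigger its own `users` lookup. The `job_artifacts_archive` preload in the `jobs.rb` hunk below follows the same pattern. A minimal sketch of the effect, using a hypothetical standalone `Issue` model that is assumed to declare `belongs_to :author`:

```ruby
# Hypothetical model; only the preload pattern comes from this diff.
issues = Issue.where(project_id: 1).limit(20)

# Without preloading: 1 query for issues + 20 queries for authors.
issues.each { |issue| issue.author.username }

# With preloading: 1 query for issues + 1 query for all their authors.
issues.preload(:author).each { |issue| issue.author.username }
```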
diff --git a/lib/api/jobs.rb b/lib/api/jobs.rb
index 54d1acbd412..e95b0dd5267 100644
--- a/lib/api/jobs.rb
+++ b/lib/api/jobs.rb
@@ -54,6 +54,7 @@ module API
pipeline = user_project.pipelines.find(params[:pipeline_id])
builds = pipeline.builds
builds = filter_builds(builds, params[:scope])
+ builds = builds.preload(:job_artifacts_archive)
present paginate(builds), with: Entities::Job
end
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index e9886c76870..db502697a19 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -205,7 +205,7 @@ module API
status 200
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
- JobArtifactUploader.workhorse_authorize
+ JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_artifacts_size)
end
desc 'Upload artifacts for job' do
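
Passing `has_length: false` declares that Workhorse will stream an artifact of unknown size, which makes `maximum_size` mandatory; the new `ObjectStorage::DirectUpload` class introduced later in this diff enforces exactly that contract in its initializer. A sketch with placeholder credentials, bucket, and object key (none of these values come from the diff):

```ruby
# Placeholder arguments; only the keyword contract is from this diff.
credentials = { provider: 'AWS', aws_access_key_id: '...', aws_secret_access_key: '...' }

ObjectStorage::DirectUpload.new(credentials, 'artifacts', 'tmp/upload/key',
                                has_length: false)
# => ArgumentError: maximum_size has to be specified if length is unknown

ObjectStorage::DirectUpload.new(credentials, 'artifacts', 'tmp/upload/key',
                                has_length: false, maximum_size: 1.gigabyte)
# => OK; multipart part sizes are derived from maximum_size
```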
diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb
index 0f7a7b0ce8d..7de66539848 100644
--- a/lib/gitlab/auth.rb
+++ b/lib/gitlab/auth.rb
@@ -240,7 +240,7 @@ module Gitlab
return unless login == 'gitlab-ci-token'
return unless password
- build = ::Ci::Build.running.find_by_token(password)
+ build = find_build_by_token(password)
return unless build
return unless build.project.builds_enabled?
@@ -301,6 +301,12 @@ module Gitlab
REGISTRY_SCOPES
end
+
+ private
+
+ def find_build_by_token(token)
+ ::Ci::Build.running.find_by_token(token)
+ end
end
end
end
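
Extracting the one-line lookup into a private `find_build_by_token` method creates a seam: another module can now change how builds are looked up without touching the calling code. A hedged sketch of such an override (the module below is hypothetical; it assumes the `class << self` context this file uses for its methods):

```ruby
# Hypothetical instrumentation module relying on the new seam.
module BuildLookupInstrumentation
  def find_build_by_token(token)
    Rails.logger.debug('looking up running build by CI job token')
    super
  end
end

# Gitlab::Auth defines these as singleton methods, so prepend there.
Gitlab::Auth.singleton_class.prepend(BuildLookupInstrumentation)
```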
diff --git a/lib/gitlab/background_migration/archive_legacy_traces.rb b/lib/gitlab/background_migration/archive_legacy_traces.rb
new file mode 100644
index 00000000000..5a4e5b2c471
--- /dev/null
+++ b/lib/gitlab/background_migration/archive_legacy_traces.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/AbcSize
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class ArchiveLegacyTraces
+ def perform(start_id, stop_id)
+ # This background migration directly refers to the ::Ci::Build model, which is defined in application code.
+ # In general, migration code should be isolated as much as possible in order to be idempotent.
+ # However, the `archive!` method is too complicated to be replicated by copying its subsequent code.
+ # So we chose to use ::Ci::Build directly, and we won't change the `archive!` method until 11.1.
+ ::Ci::Build.finished.without_archived_trace
+ .where(id: start_id..stop_id).find_each do |build|
+ begin
+ build.trace.archive!
+ rescue => e
+ Rails.logger.error "Failed to archive live trace. id: #{build.id} message: #{e.message}"
+ end
+ end
+ end
+ end
+ end
+end
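
This class only defines the `perform(start_id, stop_id)` step; scheduling happens elsewhere. A hedged sketch of how background migrations of this shape are typically driven from a post-deploy migration (the ID ranges and scheduling call are illustrative, not part of this diff):

```ruby
# Illustrative ranges; real code computes them from the ci_builds table.
[[1, 10_000], [10_001, 20_000]].each do |start_id, stop_id|
  BackgroundMigrationWorker.perform_async('ArchiveLegacyTraces', [start_id, stop_id])
end

# Sidekiq eventually runs the equivalent of:
Gitlab::BackgroundMigration::ArchiveLegacyTraces.new.perform(1, 10_000)
```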
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 4cbf20bfe76..7acf11e3c91 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -1397,6 +1397,11 @@ module Gitlab
def write_config(full_path:)
return unless full_path.present?
+ # This guard avoids Gitaly log/error spam
+ unless exists?
+ raise NoRepository, 'repository does not exist'
+ end
+
gitaly_migrate(:write_config) do |is_enabled|
if is_enabled
gitaly_repository_client.write_config(full_path: full_path)
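
With the guard in place, a missing repository surfaces as a well-defined `NoRepository` error at the call site instead of a failed Gitaly RPC plus log spam. Callers can handle it explicitly; a sketch (the rescue site and path are illustrative):

```ruby
begin
  repository.write_config(full_path: 'group/project')
rescue Gitlab::Git::Repository::NoRepository
  # Repository was removed (or never created); there is no config to write.
end
```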
diff --git a/lib/gitlab/github_import/importer/pull_request_importer.rb b/lib/gitlab/github_import/importer/pull_request_importer.rb
index 49d859f9624..b2f6cb7ad19 100644
--- a/lib/gitlab/github_import/importer/pull_request_importer.rb
+++ b/lib/gitlab/github_import/importer/pull_request_importer.rb
@@ -22,15 +22,22 @@ module Gitlab
end
def execute
- if (mr_id = create_merge_request)
- issuable_finder.cache_database_id(mr_id)
+ mr, already_exists = create_merge_request
+
+ if mr
+ insert_git_data(mr, already_exists)
+ issuable_finder.cache_database_id(mr.id)
end
end
# Creates the merge request.
#
# This method will return `nil` if the merge request could not be
- # created.
+ # created, otherwise it will return an Array containing the following
+ # values:
+ #
+ # 1. A MergeRequest instance.
+ # 2. A boolean indicating if the MR already exists.
def create_merge_request
author_id, author_found = user_finder.author_id_for(pull_request)
@@ -69,21 +76,42 @@ module Gitlab
merge_request_id = GithubImport
.insert_and_return_id(attributes, project.merge_requests)
- merge_request = project.merge_requests.find(merge_request_id)
-
- # These fields are set so we can create the correct merge request
- # diffs.
- merge_request.source_branch_sha = pull_request.source_branch_sha
- merge_request.target_branch_sha = pull_request.target_branch_sha
-
- merge_request.keep_around_commit
- merge_request.merge_request_diffs.create
-
- merge_request.id
+ [project.merge_requests.find(merge_request_id), false]
end
rescue ActiveRecord::InvalidForeignKey
# It's possible the project has been deleted since scheduling this
# job. In this case we'll just skip creating the merge request.
+ []
+ rescue ActiveRecord::RecordNotUnique
+ # It's possible we previously created the MR, but failed when updating
+ # the Git data. In this case we'll just continue working on the
+ # existing row.
+ [project.merge_requests.find_by(iid: pull_request.iid), true]
+ end
+
+ def insert_git_data(merge_request, already_exists = false)
+ # These fields are set so we can create the correct merge request
+ # diffs.
+ merge_request.source_branch_sha = pull_request.source_branch_sha
+ merge_request.target_branch_sha = pull_request.target_branch_sha
+
+ merge_request.keep_around_commit
+
+ # MR diffs normally use an "after_save" hook to pull data from Git.
+ # All of this happens in the transaction started by calling
+ # create/save/etc. This in turn can lead to these transactions being
+ # held open for much longer than necessary. To work around this we
+ # first save the diff, then populate it.
+ diff =
+ if already_exists
+ merge_request.merge_request_diffs.take
+ else
+ merge_request.merge_request_diffs.build
+ end
+
+ diff.importing = true
+ diff.save
+ diff.save_git_content
end
end
end
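
The split between `create_merge_request` and `insert_git_data` makes the import idempotent: if a previous run inserted the MR row but crashed before the Git data landed, the retry hits `ActiveRecord::RecordNotUnique`, loads the existing row by `iid`, and reuses its diff instead of building a new one. A sketch of the flow (mirroring `execute` above; `importer` is an instance of this class, constructed elsewhere):

```ruby
mr, already_exists = importer.create_merge_request
# first attempt: already_exists == false -> merge_request_diffs.build
# retried job:   already_exists == true  -> merge_request_diffs.take
importer.insert_git_data(mr, already_exists) if mr
```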
diff --git a/lib/gitlab/utils/override.rb b/lib/gitlab/utils/override.rb
index 8bf6bcb1fe2..7b2a62fed48 100644
--- a/lib/gitlab/utils/override.rb
+++ b/lib/gitlab/utils/override.rb
@@ -87,18 +87,28 @@ module Gitlab
end
def included(base = nil)
- return super if base.nil? # Rails concern, ignoring it
+ super
+
+ queue_verification(base)
+ end
+ alias_method :prepended, :included
+
+ def extended(mod)
super
+ queue_verification(mod.singleton_class)
+ end
+
+ def queue_verification(base)
+ return unless ENV['STATIC_VERIFICATION']
+
if base.is_a?(Class) # We could check for Class in `override`
# This could be `nil` if `override` was never called
Override.extensions[self]&.add_class(base)
end
end
- alias_method :prepended, :included
-
def self.extensions
@extensions ||= {}
end
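
The practical effect is that `override` verification now also covers `extend` (via the extended module's singleton class) and is queued only when `STATIC_VERIFICATION` is set, so normal code paths pay no cost. A small usage sketch with hypothetical classes:

```ruby
# Hypothetical classes; set STATIC_VERIFICATION=1 to enable the checks.
class Base
  def perform; end
end

module Patch
  extend ::Gitlab::Utils::Override

  override :perform # verification asserts Base really defines #perform
  def perform
    super
  end
end

Base.prepend(Patch) # `prepended` now queues Base for verification
```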
diff --git a/lib/object_storage/direct_upload.rb b/lib/object_storage/direct_upload.rb
new file mode 100644
index 00000000000..61a69e7ffe4
--- /dev/null
+++ b/lib/object_storage/direct_upload.rb
@@ -0,0 +1,166 @@
+module ObjectStorage
+ #
+ # The DirectUpload class generates a set of presigned URLs
+ # that can be used to upload data to object storage from an untrusted component, such as Workhorse or Runner.
+ #
+ # For Google it assumes that the platform supports variable Content-Length.
+ #
+ # For AWS it initiates a Multipart Upload and presigns a set of part uploads.
+ # The class calculates the best part size to be able to upload up to the requested maximum size.
+ # The number of generated parts will never exceed 100,
+ # but we always try to reduce the number of generated parts.
+ # The part size is rounded up to a multiple of 5MB.
+ #
+ class DirectUpload
+ include Gitlab::Utils::StrongMemoize
+
+ TIMEOUT = 4.hours
+ EXPIRE_OFFSET = 15.minutes
+
+ MAXIMUM_MULTIPART_PARTS = 100
+ MINIMUM_MULTIPART_SIZE = 5.megabytes
+
+ attr_reader :credentials, :bucket_name, :object_name
+ attr_reader :has_length, :maximum_size
+
+ def initialize(credentials, bucket_name, object_name, has_length:, maximum_size: nil)
+ unless has_length
+ raise ArgumentError, 'maximum_size has to be specified if length is unknown' unless maximum_size
+ end
+
+ @credentials = credentials
+ @bucket_name = bucket_name
+ @object_name = object_name
+ @has_length = has_length
+ @maximum_size = maximum_size
+ end
+
+ def to_hash
+ {
+ Timeout: TIMEOUT,
+ GetURL: get_url,
+ StoreURL: store_url,
+ DeleteURL: delete_url,
+ MultipartUpload: multipart_upload_hash
+ }.compact
+ end
+
+ def multipart_upload_hash
+ return unless requires_multipart_upload?
+
+ {
+ PartSize: rounded_multipart_part_size,
+ PartURLs: multipart_part_urls,
+ CompleteURL: multipart_complete_url,
+ AbortURL: multipart_abort_url
+ }
+ end
+
+ def provider
+ credentials[:provider].to_s
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html
+ def get_url
+ connection.get_object_url(bucket_name, object_name, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectDELETE.html
+ def delete_url
+ connection.delete_object_url(bucket_name, object_name, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
+ def store_url
+ connection.put_object_url(bucket_name, object_name, expire_at, upload_options)
+ end
+
+ def multipart_part_urls
+ Array.new(number_of_multipart_parts) do |part_index|
+ multipart_part_upload_url(part_index + 1)
+ end
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadUploadPart.html
+ def multipart_part_upload_url(part_number)
+ connection.signed_url({
+ method: 'PUT',
+ bucket_name: bucket_name,
+ object_name: object_name,
+ query: { uploadId: upload_id, partNumber: part_number },
+ headers: upload_options
+ }, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
+ def multipart_complete_url
+ connection.signed_url({
+ method: 'POST',
+ bucket_name: bucket_name,
+ object_name: object_name,
+ query: { uploadId: upload_id },
+ headers: { 'Content-Type' => 'application/xml' }
+ }, expire_at)
+ end
+
+ # Implements https://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadAbort.html
+ def multipart_abort_url
+ connection.signed_url({
+ method: 'DELETE',
+ bucket_name: bucket_name,
+ object_name: object_name,
+ query: { uploadId: upload_id }
+ }, expire_at)
+ end
+
+ private
+
+ def rounded_multipart_part_size
+ # round multipart_part_size up to the nearest multiple of MINIMUM_MULTIPART_SIZE
+ (multipart_part_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE * MINIMUM_MULTIPART_SIZE
+ end
+
+ def multipart_part_size
+ maximum_size / number_of_multipart_parts
+ end
+
+ def number_of_multipart_parts
+ [
+ # ceiling division: how many MINIMUM_MULTIPART_SIZE chunks cover maximum_size
+ (maximum_size + MINIMUM_MULTIPART_SIZE - 1) / MINIMUM_MULTIPART_SIZE,
+ MAXIMUM_MULTIPART_PARTS
+ ].min
+ end
+
+ def aws?
+ provider == 'AWS'
+ end
+
+ def requires_multipart_upload?
+ aws? && !has_length
+ end
+
+ def upload_id
+ return unless requires_multipart_upload?
+
+ strong_memoize(:upload_id) do
+ new_upload = connection.initiate_multipart_upload(bucket_name, object_name)
+ new_upload.body["UploadId"]
+ end
+ end
+
+ def expire_at
+ strong_memoize(:expire_at) do
+ Time.now + TIMEOUT + EXPIRE_OFFSET
+ end
+ end
+
+ def upload_options
+ { 'Content-Type' => 'application/octet-stream' }
+ end
+
+ def connection
+ @connection ||= ::Fog::Storage.new(credentials)
+ end
+ end
+end
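
The part-size logic above is all integer ceiling math. A worked example, assuming a `maximum_size` of 2GB:

```ruby
max_size = 2 * 1024 ** 3   # 2147483648 bytes (2GB)
min_part = 5 * 1024 ** 2   # 5242880 bytes (MINIMUM_MULTIPART_SIZE)

parts   = [(max_size + min_part - 1) / min_part, 100].min  # => 100 parts
raw     = max_size / parts                                 # => 21474836 bytes
rounded = (raw + min_part - 1) / min_part * min_part       # => 26214400 (25MB)

# 100 parts x 25MB gives 2.5GB of capacity, so any upload up to the
# requested 2GB maximum fits even though its exact length is unknown.
```

Rounding the part size up (rather than the part count) is what keeps the generated URL set small: the number of presigned part URLs never exceeds 100, and the slack is absorbed by slightly larger parts.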
diff --git a/lib/tasks/gitlab/traces.rake b/lib/tasks/gitlab/traces.rake
index fd2a4f2d11a..ddcca69711f 100644
--- a/lib/tasks/gitlab/traces.rake
+++ b/lib/tasks/gitlab/traces.rake
@@ -8,9 +8,7 @@ namespace :gitlab do
logger = Logger.new(STDOUT)
logger.info('Archiving legacy traces')
- Ci::Build.finished
- .where('NOT EXISTS (?)',
- Ci::JobArtifact.select(1).trace.where('ci_builds.id = ci_job_artifacts.job_id'))
+ Ci::Build.finished.without_archived_trace
.order(id: :asc)
.find_in_batches(batch_size: 1000) do |jobs|
job_ids = jobs.map { |job| [job.id] }
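
`Ci::Build.without_archived_trace` is new in this changeset and its definition is not part of this diff, but the removed lines above show exactly the query it replaces, so the scope is presumably:

```ruby
# Sketch reconstructed from the query removed above; the actual
# definition lives in app/models/ci/build.rb, outside this diff.
scope :without_archived_trace, -> {
  where('NOT EXISTS (?)',
        Ci::JobArtifact.select(1).trace.where('ci_builds.id = ci_job_artifacts.job_id'))
}
```

Both this rake task and the `ArchiveLegacyTraces` background migration earlier in the diff now share that one scope instead of duplicating the `NOT EXISTS` SQL.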