Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'app/services/ci')
-rw-r--r--app/services/ci/authorize_job_artifact_service.rb53
-rw-r--r--app/services/ci/create_job_artifacts_service.rb122
-rw-r--r--app/services/ci/create_pipeline_service.rb29
-rw-r--r--app/services/ci/destroy_expired_job_artifacts_service.rb2
-rw-r--r--app/services/ci/pipeline_processing/atomic_processing_service.rb2
-rw-r--r--app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb2
-rw-r--r--app/services/ci/pipeline_processing/legacy_processing_service.rb4
-rw-r--r--app/services/ci/process_pipeline_service.rb10
-rw-r--r--app/services/ci/register_job_service.rb19
-rw-r--r--app/services/ci/retry_build_service.rb8
-rw-r--r--app/services/ci/unlock_artifacts_service.rb33
11 files changed, 173 insertions, 111 deletions
diff --git a/app/services/ci/authorize_job_artifact_service.rb b/app/services/ci/authorize_job_artifact_service.rb
deleted file mode 100644
index 893e92d427c..00000000000
--- a/app/services/ci/authorize_job_artifact_service.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class AuthorizeJobArtifactService
- include Gitlab::Utils::StrongMemoize
-
- # Max size of the zipped LSIF artifact
- LSIF_ARTIFACT_MAX_SIZE = 20.megabytes
- LSIF_ARTIFACT_TYPE = 'lsif'
-
- def initialize(job, params, max_size:)
- @job = job
- @max_size = max_size
- @size = params[:filesize]
- @type = params[:artifact_type].to_s
- end
-
- def forbidden?
- lsif? && !code_navigation_enabled?
- end
-
- def too_large?
- size && max_size <= size.to_i
- end
-
- def headers
- default_headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size)
- default_headers.tap do |h|
- h[:ProcessLsif] = true if lsif? && code_navigation_enabled?
- end
- end
-
- private
-
- attr_reader :job, :size, :type
-
- def code_navigation_enabled?
- strong_memoize(:code_navigation_enabled) do
- Feature.enabled?(:code_navigation, job.project, default_enabled: true)
- end
- end
-
- def lsif?
- strong_memoize(:lsif) do
- type == LSIF_ARTIFACT_TYPE
- end
- end
-
- def max_size
- lsif? ? LSIF_ARTIFACT_MAX_SIZE : @max_size.to_i
- end
- end
-end
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
index f0ffe67510b..9a6e103e5dd 100644
--- a/app/services/ci/create_job_artifacts_service.rb
+++ b/app/services/ci/create_job_artifacts_service.rb
@@ -3,42 +3,104 @@
module Ci
class CreateJobArtifactsService < ::BaseService
ArtifactsExistError = Class.new(StandardError)
+
+ LSIF_ARTIFACT_TYPE = 'lsif'
+
OBJECT_STORAGE_ERRORS = [
Errno::EIO,
Google::Apis::ServerError,
Signet::RemoteServerError
].freeze
- def execute(job, artifacts_file, params, metadata_file: nil)
- return success if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)
+ def initialize(job)
+ @job = job
+ @project = job.project
+ end
+
+ def authorize(artifact_type:, filesize: nil)
+ result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
+ return result unless result[:status] == :success
+
+ headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
- artifact, artifact_metadata = build_artifact(job, artifacts_file, params, metadata_file)
- result = parse_artifact(job, artifact)
+ if lsif?(artifact_type)
+ headers[:ProcessLsif] = true
+ headers[:ProcessLsifReferences] = Feature.enabled?(:code_navigation_references, project, default_enabled: false)
+ end
+ success(headers: headers)
+ end
+
+ def execute(artifacts_file, params, metadata_file: nil)
+ result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
return result unless result[:status] == :success
- persist_artifact(job, artifact, artifact_metadata)
+ return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
+
+ artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
+ result = parse_artifact(artifact)
+
+ return result unless result[:status] == :success
+
+ persist_artifact(artifact, artifact_metadata, params)
end
private
- def build_artifact(job, artifacts_file, params, metadata_file)
+ attr_reader :job, :project
+
+ def validate_requirements(artifact_type:, filesize:)
+ return forbidden_type_error(artifact_type) if forbidden_type?(artifact_type)
+ return too_large_error if too_large?(artifact_type, filesize)
+
+ success
+ end
+
+ def forbidden_type?(type)
+ lsif?(type) && !code_navigation_enabled?
+ end
+
+ def too_large?(type, size)
+ size > max_size(type) if size
+ end
+
+ def code_navigation_enabled?
+ Feature.enabled?(:code_navigation, project, default_enabled: true)
+ end
+
+ def lsif?(type)
+ type == LSIF_ARTIFACT_TYPE
+ end
+
+ def max_size(type)
+ Ci::JobArtifact.max_artifact_size(type: type, project: project)
+ end
+
+ def forbidden_type_error(type)
+ error("#{type} artifacts are forbidden", :forbidden)
+ end
+
+ def too_large_error
+ error('file size has reached maximum size limit', :payload_too_large)
+ end
+
+ def build_artifact(artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact = Ci::JobArtifact.new(
job_id: job.id,
- project: job.project,
+ project: project,
file: artifacts_file,
- file_type: params['artifact_type'],
- file_format: params['artifact_format'],
+ file_type: params[:artifact_type],
+ file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
- project: job.project,
+ project: project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
@@ -46,31 +108,25 @@ module Ci
expire_in: expire_in)
end
- if Feature.enabled?(:keep_latest_artifact_for_ref, job.project)
- artifact.locked = true
- artifact_metadata&.locked = true
- end
-
[artifact, artifact_metadata]
end
- def parse_artifact(job, artifact)
- unless Feature.enabled?(:ci_synchronous_artifact_parsing, job.project, default_enabled: true)
+ def parse_artifact(artifact)
+ unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
return success
end
case artifact.file_type
- when 'dotenv' then parse_dotenv_artifact(job, artifact)
- when 'cluster_applications' then parse_cluster_applications_artifact(job, artifact)
+ when 'dotenv' then parse_dotenv_artifact(artifact)
+ when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
else success
end
end
- def persist_artifact(job, artifact, artifact_metadata)
+ def persist_artifact(artifact, artifact_metadata, params)
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
- unlock_previous_artifacts!(artifact)
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
@@ -78,42 +134,36 @@ module Ci
success
rescue ActiveRecord::RecordNotUnique => error
- track_exception(error, job, params)
+ track_exception(error, params)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
- track_exception(error, job, params)
+ track_exception(error, params)
error(error.message, :service_unavailable)
rescue => error
- track_exception(error, job, params)
+ track_exception(error, params)
error(error.message, :bad_request)
end
- def unlock_previous_artifacts!(artifact)
- return unless Feature.enabled?(:keep_latest_artifact_for_ref, artifact.job.project)
-
- Ci::JobArtifact.for_ref(artifact.job.ref, artifact.project_id).locked.update_all(locked: false)
- end
-
- def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
+ def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
existing_artifact.file_sha256 == artifacts_file.sha256
end
- def track_exception(error, job, params)
+ def track_exception(error, params)
Gitlab::ErrorTracking.track_exception(error,
job_id: job.id,
project_id: job.project_id,
- uploading_type: params['artifact_type']
+ uploading_type: params[:artifact_type]
)
end
- def parse_dotenv_artifact(job, artifact)
- Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
+ def parse_dotenv_artifact(artifact)
+ Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
end
- def parse_cluster_applications_artifact(job, artifact)
+ def parse_cluster_applications_artifact(artifact)
Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
end
end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 922c3556362..2d7f5014aa9 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -23,6 +23,24 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Limit::Activity,
Gitlab::Ci::Pipeline::Chain::Limit::JobActivity].freeze
+ # Create a new pipeline in the specified project.
+ #
+ # @param [Symbol] source What event (Ci::Pipeline.sources) triggers the pipeline
+ # creation.
+ # @param [Boolean] ignore_skip_ci Whether skipping a pipeline creation when `[skip ci]` comment
+ # is present in the commit body
+ # @param [Boolean] save_on_errors Whether persisting an invalid pipeline when it encounters an
+ # error during creation (e.g. invalid yaml)
+ # @param [Ci::TriggerRequest] trigger_request The pipeline trigger triggers the pipeline creation.
+ # @param [Ci::PipelineSchedule] schedule The pipeline schedule triggers the pipeline creation.
+ # @param [MergeRequest] merge_request The merge request triggers the pipeline creation.
+ # @param [ExternalPullRequest] external_pull_request The external pull request triggers the pipeline creation.
+ # @param [Ci::Bridge] bridge The bridge job that triggers the downstream pipeline creation.
+ # @param [String] content The content of .gitlab-ci.yml to override the default config
+ #                          contents (e.g. .gitlab-ci.yml in repository). Mainly used for
+ # generating a dangling pipeline.
+ #
+ # @return [Ci::Pipeline] The created Ci::Pipeline object.
# rubocop: disable Metrics/ParameterLists
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, merge_request: nil, external_pull_request: nil, bridge: nil, **options, &block)
@pipeline = Ci::Pipeline.new
@@ -77,7 +95,7 @@ module Ci
def execute!(*args, &block)
execute(*args, &block).tap do |pipeline|
unless pipeline.persisted?
- raise CreateError, pipeline.error_messages
+ raise CreateError, pipeline.full_error_messages
end
end
end
@@ -122,13 +140,8 @@ module Ci
end
end
- def extra_options(options = {})
- # In Ruby 2.4, even when options is empty, f(**options) doesn't work when f
- # doesn't have any parameters. We reproduce the Ruby 2.5 behavior by
- # checking explicitly that no arguments are given.
- raise ArgumentError if options.any?
-
- {} # overridden in EE
+ def extra_options(content: nil)
+ { content: content }
end
end
end
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index 5deb84812ac..1fa8926faa1 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -28,7 +28,7 @@ module Ci
private
def destroy_batch
- artifact_batch = if Feature.enabled?(:keep_latest_artifact_for_ref)
+ artifact_batch = if Gitlab::Ci::Features.destroy_only_unlocked_expired_artifacts_enabled?
Ci::JobArtifact.expired(BATCH_SIZE).unlocked
else
Ci::JobArtifact.expired(BATCH_SIZE)
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service.rb b/app/services/ci/pipeline_processing/atomic_processing_service.rb
index b01a9d2e3b8..a23d5d8941a 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service.rb
@@ -77,7 +77,7 @@ module Ci
def update_processable!(processable)
status = processable_status(processable)
- return unless HasStatus::COMPLETED_STATUSES.include?(status)
+ return unless Ci::HasStatus::COMPLETED_STATUSES.include?(status)
# transition status if possible
Gitlab::OptimisticLocking.retry_lock(processable) do |subject|
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
index 2228328882d..d0aa8b04775 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -80,7 +80,7 @@ module Ci
# TODO: This is hack to support
# the same exact behaviour for Atomic and Legacy processing
# that DAG is blocked from executing if dependent is not "complete"
- if dag && statuses.any? { |status| HasStatus::COMPLETED_STATUSES.exclude?(status[:status]) }
+ if dag && statuses.any? { |status| Ci::HasStatus::COMPLETED_STATUSES.exclude?(status[:status]) }
return 'pending'
end
diff --git a/app/services/ci/pipeline_processing/legacy_processing_service.rb b/app/services/ci/pipeline_processing/legacy_processing_service.rb
index c471f7f0011..56fbc7271da 100644
--- a/app/services/ci/pipeline_processing/legacy_processing_service.rb
+++ b/app/services/ci/pipeline_processing/legacy_processing_service.rb
@@ -35,7 +35,7 @@ module Ci
def process_stage_for_stage_scheduling(index)
current_status = status_for_prior_stages(index)
- return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
+ return unless Ci::HasStatus::COMPLETED_STATUSES.include?(current_status)
created_stage_scheduled_processables_in_stage(index).find_each.select do |build|
process_build(build, current_status)
@@ -73,7 +73,7 @@ module Ci
def process_dag_build_with_needs(build)
current_status = status_for_build_needs(build.needs.map(&:name))
- return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
+ return unless Ci::HasStatus::COMPLETED_STATUSES.include?(current_status)
process_build(build, current_status)
end
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index 80ebe5f5eb6..1f24dce0458 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -9,6 +9,8 @@ module Ci
end
def execute(trigger_build_ids = nil, initial_process: false)
+ increment_processing_counter
+
update_retried
if ::Gitlab::Ci::Features.atomic_processing?(pipeline.project)
@@ -22,6 +24,10 @@ module Ci
end
end
+ def metrics
+ @metrics ||= ::Gitlab::Ci::Pipeline::Metrics.new
+ end
+
private
# This method is for compatibility and data consistency and should be removed with 9.3 version of GitLab
@@ -43,5 +49,9 @@ module Ci
.update_all(retried: true) if latest_statuses.any?
end
# rubocop: enable CodeReuse/ActiveRecord
+
+ def increment_processing_counter
+ metrics.pipeline_processing_events_counter.increment
+ end
end
end
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index 17b9e56636b..3797ea1d96c 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -11,7 +11,7 @@ module Ci
METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'.freeze
DEFAULT_METRICS_SHARD = 'default'.freeze
- Result = Struct.new(:build, :valid?)
+ Result = Struct.new(:build, :build_json, :valid?)
def initialize(runner)
@runner = runner
@@ -59,7 +59,7 @@ module Ci
end
register_failure
- Result.new(nil, valid)
+ Result.new(nil, nil, valid)
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -71,7 +71,7 @@ module Ci
# In case when 2 runners try to assign the same build, second runner will be declined
# with StateMachines::InvalidTransition or StaleObjectError when doing run! or save method.
if assign_runner!(build, params)
- Result.new(build, true)
+ present_build!(build)
end
rescue StateMachines::InvalidTransition, ActiveRecord::StaleObjectError
# We are looping to find another build that is not conflicting
@@ -83,8 +83,10 @@ module Ci
# In case we hit the concurrency-access lock,
# we still have to return 409 in the end,
# to make sure that this is properly handled by runner.
- Result.new(nil, false)
+ Result.new(nil, nil, false)
rescue => ex
+ # If an error (e.g. GRPC::DeadlineExceeded) occurred constructing
+ # the result, consider this as a failure to be retried.
scheduler_failure!(build)
track_exception_for_build(ex, build)
@@ -92,6 +94,15 @@ module Ci
nil
end
+ # Force variables evaluation to occur now
+ def present_build!(build)
+ # We need to use the presenter here because Gitaly calls in the presenter
+ # may fail, and we need to ensure the response has been generated.
+ presented_build = ::Ci::BuildRunnerPresenter.new(build) # rubocop:disable CodeReuse/Presenter
+ build_json = ::API::Entities::JobRequest::Response.new(presented_build).to_json
+ Result.new(build, build_json, true)
+ end
+
def assign_runner!(build, params)
build.runner_id = runner.id
build.runner_session_attributes = params[:session] if params[:session].present?
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 23507a31c72..60b3d28b0c5 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -34,10 +34,6 @@ module Ci
attributes[:user] = current_user
- # TODO: we can probably remove this logic
- # see: https://gitlab.com/gitlab-org/gitlab/-/issues/217930
- attributes[:scheduling_type] ||= build.find_legacy_scheduling_type
-
Ci::Build.transaction do
# mark all other builds of that name as retried
build.pipeline.builds.latest
@@ -59,7 +55,9 @@ module Ci
build = project.builds.new(attributes)
build.assign_attributes(::Gitlab::Ci::Pipeline::Seed::Build.environment_attributes_for(build))
build.retried = false
- build.save!
+ BulkInsertableAssociations.with_bulk_insert(enabled: ::Gitlab::Ci::Features.bulk_insert_on_create?(project)) do
+ build.save!
+ end
build
end
end
diff --git a/app/services/ci/unlock_artifacts_service.rb b/app/services/ci/unlock_artifacts_service.rb
new file mode 100644
index 00000000000..07faf90dd6d
--- /dev/null
+++ b/app/services/ci/unlock_artifacts_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Ci
+ class UnlockArtifactsService < ::BaseService
+ BATCH_SIZE = 100
+
+ def execute(ci_ref, before_pipeline = nil)
+ query = <<~SQL.squish
+ UPDATE "ci_pipelines"
+ SET "locked" = #{::Ci::Pipeline.lockeds[:unlocked]}
+ WHERE "ci_pipelines"."id" in (
+ #{collect_pipelines(ci_ref, before_pipeline).select(:id).to_sql}
+ LIMIT #{BATCH_SIZE}
+ FOR UPDATE SKIP LOCKED
+ )
+ RETURNING "ci_pipelines"."id";
+ SQL
+
+ loop do
+ break if ActiveRecord::Base.connection.exec_query(query).empty?
+ end
+ end
+
+ private
+
+ def collect_pipelines(ci_ref, before_pipeline)
+ pipeline_scope = ci_ref.pipelines
+ pipeline_scope = pipeline_scope.before_pipeline(before_pipeline) if before_pipeline
+
+ pipeline_scope.artifacts_locked
+ end
+ end
+end