Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
Diffstat (limited to 'app/services/ci')
-rw-r--r--  app/services/ci/create_pipeline_service.rb                          |   1
-rw-r--r--  app/services/ci/destroy_pipeline_service.rb                         |   4
-rw-r--r--  app/services/ci/external_pull_requests/create_pipeline_service.rb   |  11
-rw-r--r--  app/services/ci/generate_kubeconfig_service.rb                      |  62
-rw-r--r--  app/services/ci/job_artifacts/create_service.rb                     |  41
-rw-r--r--  app/services/ci/job_artifacts/destroy_all_expired_service.rb        |  18
-rw-r--r--  app/services/ci/job_artifacts/destroy_batch_service.rb              |  23
-rw-r--r--  app/services/ci/parse_dotenv_artifact_service.rb                    |  19
-rw-r--r--  app/services/ci/retry_build_service.rb                              |   7
-rw-r--r--  app/services/ci/unlock_artifacts_service.rb                         | 100
-rw-r--r--  app/services/ci/update_build_state_service.rb                       |   4
11 files changed, 233 insertions(+), 57 deletions(-)
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index ba9665555cc..540e8f7b970 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -25,6 +25,7 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Populate,
Gitlab::Ci::Pipeline::Chain::StopDryRun,
Gitlab::Ci::Pipeline::Chain::Create,
+ Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations,
Gitlab::Ci::Pipeline::Chain::Limit::Activity,
Gitlab::Ci::Pipeline::Chain::Limit::JobActivity,
Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines,
diff --git a/app/services/ci/destroy_pipeline_service.rb b/app/services/ci/destroy_pipeline_service.rb
index 476c7523d60..6fbde5d291c 100644
--- a/app/services/ci/destroy_pipeline_service.rb
+++ b/app/services/ci/destroy_pipeline_service.rb
@@ -12,7 +12,9 @@ module Ci
# Ci::Pipeline#destroy triggers `use_fast_destroy :job_artifacts` and
# ci_builds has ON DELETE CASCADE to ci_pipelines. The pipeline, the builds,
# job and pipeline artifacts all get destroyed here.
- pipeline.reset.destroy!
+ ::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/345664') do
+ pipeline.reset.destroy!
+ end
ServiceResponse.success(message: 'Pipeline not found')
rescue ActiveRecord::RecordNotFound
diff --git a/app/services/ci/external_pull_requests/create_pipeline_service.rb b/app/services/ci/external_pull_requests/create_pipeline_service.rb
index dd93ca4708e..66127c94d35 100644
--- a/app/services/ci/external_pull_requests/create_pipeline_service.rb
+++ b/app/services/ci/external_pull_requests/create_pipeline_service.rb
@@ -16,14 +16,9 @@ module Ci
private
def create_pipeline_for(pull_request)
- if ::Feature.enabled?(:ci_create_external_pr_pipeline_async, project, default_enabled: :yaml)
- Ci::ExternalPullRequests::CreatePipelineWorker.perform_async(
- project.id, current_user.id, pull_request.id
- )
- else
- Ci::CreatePipelineService.new(project, current_user, create_params(pull_request))
- .execute(:external_pull_request_event, external_pull_request: pull_request)
- end
+ Ci::ExternalPullRequests::CreatePipelineWorker.perform_async(
+ project.id, current_user.id, pull_request.id
+ )
end
def create_params(pull_request)
diff --git a/app/services/ci/generate_kubeconfig_service.rb b/app/services/ci/generate_kubeconfig_service.rb
new file mode 100644
index 00000000000..18f68c0ff09
--- /dev/null
+++ b/app/services/ci/generate_kubeconfig_service.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Ci
+ class GenerateKubeconfigService
+ def initialize(build)
+ @build = build
+ @template = Gitlab::Kubernetes::Kubeconfig::Template.new
+ end
+
+ def execute
+ template.add_cluster(
+ name: cluster_name,
+ url: Gitlab::Kas.tunnel_url
+ )
+
+ agents.each do |agent|
+ user = user_name(agent)
+
+ template.add_user(
+ name: user,
+ token: agent_token(agent)
+ )
+
+ template.add_context(
+ name: context_name(agent),
+ cluster: cluster_name,
+ user: user
+ )
+ end
+
+ template
+ end
+
+ private
+
+ attr_reader :build, :template
+
+ def agents
+ build.pipeline.authorized_cluster_agents
+ end
+
+ def cluster_name
+ 'gitlab'
+ end
+
+ def user_name(agent)
+ ['agent', agent.id].join(delimiter)
+ end
+
+ def context_name(agent)
+ [agent.project.full_path, agent.name].join(delimiter)
+ end
+
+ def agent_token(agent)
+ ['ci', agent.id, build.token].join(delimiter)
+ end
+
+ def delimiter
+ ':'
+ end
+ end
+end
diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb
index 9fc7c3b4d40..7c67a2e175d 100644
--- a/app/services/ci/job_artifacts/create_service.rb
+++ b/app/services/ci/job_artifacts/create_service.rb
@@ -19,6 +19,7 @@ module Ci
def initialize(job)
@job = job
@project = job.project
+ @pipeline = job.pipeline if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, @project)
end
def authorize(artifact_type:, filesize: nil)
@@ -53,7 +54,7 @@ module Ci
private
- attr_reader :job, :project
+ attr_reader :job, :project, :pipeline
def validate_requirements(artifact_type:, filesize:)
return too_large_error if too_large?(artifact_type, filesize)
@@ -85,34 +86,38 @@ module Ci
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
- artifact = Ci::JobArtifact.new(
+ artifact_attributes = {
job_id: job.id,
project: project,
- file: artifacts_file,
- file_type: params[:artifact_type],
- file_format: params[:artifact_format],
- file_sha256: artifacts_file.sha256,
- expire_in: expire_in)
+ expire_in: expire_in
+ }
+
+ artifact_attributes[:locked] = pipeline.locked if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, project)
+
+ artifact = Ci::JobArtifact.new(
+ artifact_attributes.merge(
+ file: artifacts_file,
+ file_type: params[:artifact_type],
+ file_format: params[:artifact_format],
+ file_sha256: artifacts_file.sha256
+ )
+ )
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
- job_id: job.id,
- project: project,
- file: metadata_file,
- file_type: :metadata,
- file_format: :gzip,
- file_sha256: metadata_file.sha256,
- expire_in: expire_in)
+ artifact_attributes.merge(
+ file: metadata_file,
+ file_type: :metadata,
+ file_format: :gzip,
+ file_sha256: metadata_file.sha256
+ )
+ )
end
[artifact, artifact_metadata]
end
def parse_artifact(artifact)
- unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
- return success
- end
-
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(artifact)
else success
diff --git a/app/services/ci/job_artifacts/destroy_all_expired_service.rb b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
index 3e9cc95d135..e4f65736a58 100644
--- a/app/services/ci/job_artifacts/destroy_all_expired_service.rb
+++ b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
@@ -24,7 +24,11 @@ module Ci
# which is scheduled every 7 minutes.
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
- destroy_job_artifacts_with_slow_iteration(Time.current)
+ if ::Feature.enabled?(:ci_destroy_unlocked_job_artifacts)
+ destroy_unlocked_job_artifacts(Time.current)
+ else
+ destroy_job_artifacts_with_slow_iteration(Time.current)
+ end
end
@removed_artifacts_count
@@ -32,13 +36,21 @@ module Ci
private
+ def destroy_unlocked_job_artifacts(start_at)
+ loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
+ artifacts = Ci::JobArtifact.expired_before(start_at).artifact_unlocked.limit(BATCH_SIZE)
+ service_response = destroy_batch(artifacts)
+ @removed_artifacts_count += service_response[:destroyed_artifacts_count]
+ end
+ end
+
def destroy_job_artifacts_with_slow_iteration(start_at)
Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
# For performance reasons, join with ci_pipelines after the batch is queried.
# See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
artifacts = relation.unlocked
- service_response = destroy_batch_async(artifacts)
+ service_response = destroy_batch(artifacts)
@removed_artifacts_count += service_response[:destroyed_artifacts_count]
break if loop_timeout?(start_at)
@@ -46,7 +58,7 @@ module Ci
end
end
- def destroy_batch_async(artifacts)
+ def destroy_batch(artifacts)
Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
end
diff --git a/app/services/ci/job_artifacts/destroy_batch_service.rb b/app/services/ci/job_artifacts/destroy_batch_service.rb
index 8536b88ccc0..866b40c32d8 100644
--- a/app/services/ci/job_artifacts/destroy_batch_service.rb
+++ b/app/services/ci/job_artifacts/destroy_batch_service.rb
@@ -26,15 +26,18 @@ module Ci
def execute(update_stats: true)
return success(destroyed_artifacts_count: 0, statistics_updates: {}) if @job_artifacts.empty?
+ destroy_related_records(@job_artifacts)
+
Ci::DeletedObject.transaction do
Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
- destroy_related_records(@job_artifacts)
end
+ after_batch_destroy_hook(@job_artifacts)
+
# This is executed outside of the transaction because it depends on Redis
update_project_statistics! if update_stats
- increment_monitoring_statistics(artifacts_count)
+ increment_monitoring_statistics(artifacts_count, artifacts_bytes)
success(destroyed_artifacts_count: artifacts_count,
statistics_updates: affected_project_statistics)
@@ -43,9 +46,12 @@ module Ci
private
- # This method is implemented in EE and it must do only database work
+ # Overriden in EE
def destroy_related_records(artifacts); end
+ # Overriden in EE
+ def after_batch_destroy_hook(artifacts); end
+
# using ! here since this can't be called inside a transaction
def update_project_statistics!
affected_project_statistics.each do |project, delta|
@@ -63,8 +69,9 @@ module Ci
end
end
- def increment_monitoring_statistics(size)
- metrics.increment_destroyed_artifacts(size)
+ def increment_monitoring_statistics(size, bytes)
+ metrics.increment_destroyed_artifacts_count(size)
+ metrics.increment_destroyed_artifacts_bytes(bytes)
end
def metrics
@@ -76,6 +83,12 @@ module Ci
@job_artifacts.count
end
end
+
+ def artifacts_bytes
+ strong_memoize(:artifacts_bytes) do
+ @job_artifacts.sum { |artifact| artifact.try(:size) || 0 }
+ end
+ end
end
end
end
diff --git a/app/services/ci/parse_dotenv_artifact_service.rb b/app/services/ci/parse_dotenv_artifact_service.rb
index 2ee9be476bb..725ecbcce5d 100644
--- a/app/services/ci/parse_dotenv_artifact_service.rb
+++ b/app/services/ci/parse_dotenv_artifact_service.rb
@@ -2,8 +2,7 @@
module Ci
class ParseDotenvArtifactService < ::BaseService
- MAX_ACCEPTABLE_DOTENV_SIZE = 5.kilobytes
- MAX_ACCEPTABLE_VARIABLES_COUNT = 20
+ include ::Gitlab::Utils::StrongMemoize
SizeLimitError = Class.new(StandardError)
ParserError = Class.new(StandardError)
@@ -27,9 +26,9 @@ module Ci
raise ArgumentError, 'Artifact is not dotenv file type'
end
- unless artifact.file.size < MAX_ACCEPTABLE_DOTENV_SIZE
+ unless artifact.file.size < dotenv_size_limit
raise SizeLimitError,
- "Dotenv Artifact Too Big. Maximum Allowable Size: #{MAX_ACCEPTABLE_DOTENV_SIZE}"
+ "Dotenv Artifact Too Big. Maximum Allowable Size: #{dotenv_size_limit}"
end
end
@@ -45,9 +44,9 @@ module Ci
end
end
- if variables.size > MAX_ACCEPTABLE_VARIABLES_COUNT
+ if variables.size > dotenv_variable_limit
raise SizeLimitError,
- "Dotenv files cannot have more than #{MAX_ACCEPTABLE_VARIABLES_COUNT} variables"
+ "Dotenv files cannot have more than #{dotenv_variable_limit} variables"
end
variables
@@ -60,5 +59,13 @@ module Ci
result.each(&:strip!)
end
+
+ def dotenv_variable_limit
+ strong_memoize(:dotenv_variable_limit) { project.actual_limits.dotenv_variables }
+ end
+
+ def dotenv_size_limit
+ strong_memoize(:dotenv_size_limit) { project.actual_limits.dotenv_size }
+ end
end
end
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 07cfbb9ce3c..ebb07de9d29 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -63,7 +63,7 @@ module Ci
def clone_build(build)
project.builds.new(build_attributes(build)).tap do |new_build|
- new_build.assign_attributes(::Gitlab::Ci::Pipeline::Seed::Build.environment_attributes_for(new_build))
+ new_build.assign_attributes(deployment_attributes_for(new_build, build))
end
end
@@ -75,6 +75,11 @@ module Ci
attributes[:user] = current_user
attributes
end
+
+ def deployment_attributes_for(new_build, old_build)
+ ::Gitlab::Ci::Pipeline::Seed::Build
+ .deployment_attributes_for(new_build, old_build.persisted_environment)
+ end
end
end
diff --git a/app/services/ci/unlock_artifacts_service.rb b/app/services/ci/unlock_artifacts_service.rb
index 7c169cb8395..30da31ba8ec 100644
--- a/app/services/ci/unlock_artifacts_service.rb
+++ b/app/services/ci/unlock_artifacts_service.rb
@@ -5,22 +5,84 @@ module Ci
BATCH_SIZE = 100
def execute(ci_ref, before_pipeline = nil)
- query = <<~SQL.squish
- UPDATE "ci_pipelines"
- SET "locked" = #{::Ci::Pipeline.lockeds[:unlocked]}
- WHERE "ci_pipelines"."id" in (
- #{collect_pipelines(ci_ref, before_pipeline).select(:id).to_sql}
- LIMIT #{BATCH_SIZE}
- FOR UPDATE SKIP LOCKED
- )
- RETURNING "ci_pipelines"."id";
- SQL
-
- loop do
- break if Ci::Pipeline.connection.exec_query(query).empty?
+ results = {
+ unlocked_pipelines: 0,
+ unlocked_job_artifacts: 0
+ }
+
+ if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, ci_ref.project)
+ loop do
+ unlocked_pipelines = []
+ unlocked_job_artifacts = []
+
+ ::Ci::Pipeline.transaction do
+ unlocked_pipelines = unlock_pipelines(ci_ref, before_pipeline)
+ unlocked_job_artifacts = unlock_job_artifacts(unlocked_pipelines)
+ end
+
+ break if unlocked_pipelines.empty?
+
+ results[:unlocked_pipelines] += unlocked_pipelines.length
+ results[:unlocked_job_artifacts] += unlocked_job_artifacts.length
+ end
+ else
+ query = <<~SQL.squish
+ UPDATE "ci_pipelines"
+ SET "locked" = #{::Ci::Pipeline.lockeds[:unlocked]}
+ WHERE "ci_pipelines"."id" in (
+ #{collect_pipelines(ci_ref, before_pipeline).select(:id).to_sql}
+ LIMIT #{BATCH_SIZE}
+ FOR UPDATE SKIP LOCKED
+ )
+ RETURNING "ci_pipelines"."id";
+ SQL
+
+ loop do
+ unlocked_pipelines = Ci::Pipeline.connection.exec_query(query)
+
+ break if unlocked_pipelines.empty?
+
+ results[:unlocked_pipelines] += unlocked_pipelines.length
+ end
end
+
+ results
end
+ # rubocop:disable CodeReuse/ActiveRecord
+ def unlock_job_artifacts_query(pipeline_ids)
+ ci_job_artifacts = ::Ci::JobArtifact.arel_table
+
+ build_ids = ::Ci::Build.select(:id).where(commit_id: pipeline_ids)
+
+ returning = Arel::Nodes::Grouping.new(ci_job_artifacts[:id])
+
+ Arel::UpdateManager.new
+ .table(ci_job_artifacts)
+ .where(ci_job_artifacts[:job_id].in(Arel.sql(build_ids.to_sql)))
+ .set([[ci_job_artifacts[:locked], ::Ci::JobArtifact.lockeds[:unlocked]]])
+ .to_sql + " RETURNING #{returning.to_sql}"
+ end
+ # rubocop:enable CodeReuse/ActiveRecord
+
+ # rubocop:disable CodeReuse/ActiveRecord
+ def unlock_pipelines_query(ci_ref, before_pipeline)
+ ci_pipelines = ::Ci::Pipeline.arel_table
+
+ pipelines_scope = ci_ref.pipelines.artifacts_locked
+ pipelines_scope = pipelines_scope.before_pipeline(before_pipeline) if before_pipeline
+ pipelines_scope = pipelines_scope.select(:id).limit(BATCH_SIZE).lock('FOR UPDATE SKIP LOCKED')
+
+ returning = Arel::Nodes::Grouping.new(ci_pipelines[:id])
+
+ Arel::UpdateManager.new
+ .table(ci_pipelines)
+ .where(ci_pipelines[:id].in(Arel.sql(pipelines_scope.to_sql)))
+ .set([[ci_pipelines[:locked], ::Ci::Pipeline.lockeds[:unlocked]]])
+ .to_sql + " RETURNING #{returning.to_sql}"
+ end
+ # rubocop:enable CodeReuse/ActiveRecord
+
private
def collect_pipelines(ci_ref, before_pipeline)
@@ -29,5 +91,17 @@ module Ci
pipeline_scope.artifacts_locked
end
+
+ def unlock_job_artifacts(pipelines)
+ return if pipelines.empty?
+
+ ::Ci::JobArtifact.connection.exec_query(
+ unlock_job_artifacts_query(pipelines.rows.flatten)
+ )
+ end
+
+ def unlock_pipelines(ci_ref, before_pipeline)
+ ::Ci::Pipeline.connection.exec_query(unlock_pipelines_query(ci_ref, before_pipeline))
+ end
end
end
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
index 3b403f92486..826d9a2eda3 100644
--- a/app/services/ci/update_build_state_service.rb
+++ b/app/services/ci/update_build_state_service.rb
@@ -73,11 +73,11 @@ module Ci
::Gitlab::Ci::Trace::Checksum.new(build).then do |checksum|
unless checksum.valid?
metrics.increment_trace_operation(operation: :invalid)
- metrics.increment_error_counter(type: :chunks_invalid_checksum)
+ metrics.increment_error_counter(error_reason: :chunks_invalid_checksum)
if checksum.corrupted?
metrics.increment_trace_operation(operation: :corrupted)
- metrics.increment_error_counter(type: :chunks_invalid_size)
+ metrics.increment_error_counter(error_reason: :chunks_invalid_size)
end
next unless log_invalid_chunks?