gitlab.com/gitlab-org/gitlab-foss.git

author     GitLab Bot <gitlab-bot@gitlab.com>  2020-05-20 17:34:42 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-05-20 17:34:42 +0300
commit     9f46488805e86b1bc341ea1620b866016c2ce5ed (patch)
tree       f9748c7e287041e37d6da49e0a29c9511dc34768 /app/services/ci
parent     dfc92d081ea0332d69c8aca2f0e745cb48ae5e6d (diff)
Add latest changes from gitlab-org/gitlab@13-0-stable-ee
Diffstat (limited to 'app/services/ci')
-rw-r--r--  app/services/ci/compare_accessibility_reports_service.rb | 17
-rw-r--r--  app/services/ci/create_job_artifacts_service.rb | 17
-rw-r--r--  app/services/ci/create_pipeline_service.rb | 21
-rw-r--r--  app/services/ci/daily_build_group_report_result_service.rb (renamed from app/services/ci/daily_report_result_service.rb) | 11
-rw-r--r--  app/services/ci/destroy_expired_job_artifacts_service.rb | 8
-rw-r--r--  app/services/ci/generate_terraform_reports_service.rb | 29
-rw-r--r--  app/services/ci/pipeline_processing/atomic_processing_service.rb | 2
-rw-r--r--  app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb | 19
-rw-r--r--  app/services/ci/pipeline_schedule_service.rb | 14
-rw-r--r--  app/services/ci/process_pipeline_service.rb | 13
-rw-r--r--  app/services/ci/register_job_service.rb | 4
-rw-r--r--  app/services/ci/retry_build_service.rb | 5
-rw-r--r--  app/services/ci/retry_pipeline_service.rb | 2
-rw-r--r--  app/services/ci/update_instance_variables_service.rb | 72
14 files changed, 177 insertions(+), 57 deletions(-)
diff --git a/app/services/ci/compare_accessibility_reports_service.rb b/app/services/ci/compare_accessibility_reports_service.rb
new file mode 100644
index 00000000000..efb38d39d98
--- /dev/null
+++ b/app/services/ci/compare_accessibility_reports_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Ci
+  class CompareAccessibilityReportsService < CompareReportsBaseService
+    def comparer_class
+      Gitlab::Ci::Reports::AccessibilityReportsComparer
+    end
+
+    def serializer_class
+      AccessibilityReportsComparerSerializer
+    end
+
+    def get_report(pipeline)
+      pipeline&.accessibility_reports
+    end
+  end
+end
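
The service above only supplies the three hooks that CompareReportsBaseService expects: the comparer, the serializer, and how to pull the report off a pipeline. A usage sketch, not part of this commit, assuming the base class's usual (project, user) constructor and execute(base_pipeline, head_pipeline) entry point:

    # Hypothetical invocation, e.g. from a merge request widget endpoint;
    # project, user and the two pipelines are assumed to be in scope.
    result = Ci::CompareAccessibilityReportsService
      .new(project, user)
      .execute(base_pipeline, head_pipeline)

    # Roughly { status: :parsed, key: ..., data: <serialized comparison> } on success,
    # or { status: :error, status_reason: ... } if parsing the reports fails.
    result[:status]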
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
index 5d7d552dc5a..f0ffe67510b 100644
--- a/app/services/ci/create_job_artifacts_service.rb
+++ b/app/services/ci/create_job_artifacts_service.rb
@@ -46,6 +46,11 @@ module Ci
expire_in: expire_in)
end
+ if Feature.enabled?(:keep_latest_artifact_for_ref, job.project)
+ artifact.locked = true
+ artifact_metadata&.locked = true
+ end
+
[artifact, artifact_metadata]
end
@@ -56,6 +61,7 @@ module Ci
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(job, artifact)
+ when 'cluster_applications' then parse_cluster_applications_artifact(job, artifact)
else success
end
end
@@ -64,6 +70,7 @@ module Ci
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
+ unlock_previous_artifacts!(artifact)
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
@@ -81,6 +88,12 @@ module Ci
error(error.message, :bad_request)
end
+ def unlock_previous_artifacts!(artifact)
+ return unless Feature.enabled?(:keep_latest_artifact_for_ref, artifact.job.project)
+
+ Ci::JobArtifact.for_ref(artifact.job.ref, artifact.project_id).locked.update_all(locked: false)
+ end
+
def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
@@ -99,5 +112,9 @@ module Ci
def parse_dotenv_artifact(job, artifact)
Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
end
+
+ def parse_cluster_applications_artifact(job, artifact)
+ Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
+ end
end
end
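
Together with the destroy_expired_job_artifacts_service.rb change further down, the :keep_latest_artifact_for_ref feature flag keeps one protected artifact set per ref. A rough sketch of the lifecycle, not part of this commit (the for_ref, locked and unlocked scopes on Ci::JobArtifact are only used, not defined, in this diff):

    # 1. A freshly uploaded artifact and its metadata are created locked.
    artifact.locked = true
    # 2. Saving it releases whatever was locked for the same ref before.
    Ci::JobArtifact.for_ref(artifact.job.ref, artifact.project_id).locked.update_all(locked: false)
    # 3. The expiry sweeper then only deletes unlocked rows, so the newest
    #    artifacts for a ref survive their expire_at.
    Ci::JobArtifact.expired(batch_size).unlocked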
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 347630f865f..922c3556362 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -102,21 +102,12 @@ module Ci
# rubocop: disable CodeReuse/ActiveRecord
def auto_cancelable_pipelines
- # TODO: Introduced by https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/23464
- if Feature.enabled?(:ci_support_interruptible_pipelines, project, default_enabled: true)
- project.ci_pipelines
- .where(ref: pipeline.ref)
- .where.not(id: pipeline.same_family_pipeline_ids)
- .where.not(sha: project.commit(pipeline.ref).try(:id))
- .alive_or_scheduled
- .with_only_interruptible_builds
- else
- project.ci_pipelines
- .where(ref: pipeline.ref)
- .where.not(id: pipeline.same_family_pipeline_ids)
- .where.not(sha: project.commit(pipeline.ref).try(:id))
- .created_or_pending
- end
+ project.ci_pipelines
+ .where(ref: pipeline.ref)
+ .where.not(id: pipeline.same_family_pipeline_ids)
+ .where.not(sha: project.commit(pipeline.ref).try(:id))
+ .alive_or_scheduled
+ .with_only_interruptible_builds
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/ci/daily_report_result_service.rb b/app/services/ci/daily_build_group_report_result_service.rb
index b774a806203..6cdf3c88f8c 100644
--- a/app/services/ci/daily_report_result_service.rb
+++ b/app/services/ci/daily_build_group_report_result_service.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
module Ci
- class DailyReportResultService
+ class DailyBuildGroupReportResultService
def execute(pipeline)
return unless Feature.enabled?(:ci_daily_code_coverage, pipeline.project, default_enabled: true)
- DailyReportResult.upsert_reports(coverage_reports(pipeline))
+ DailyBuildGroupReportResult.upsert_reports(coverage_reports(pipeline))
end
private
@@ -14,15 +14,16 @@ module Ci
base_attrs = {
project_id: pipeline.project_id,
ref_path: pipeline.source_ref_path,
- param_type: DailyReportResult.param_types[:coverage],
date: pipeline.created_at.to_date,
last_pipeline_id: pipeline.id
}
aggregate(pipeline.builds.with_coverage).map do |group_name, group|
base_attrs.merge(
- title: group_name,
- value: average_coverage(group)
+ group_name: group_name,
+ data: {
+ 'coverage' => average_coverage(group)
+ }
)
end
end
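
The rename replaces the single title/value coverage row with a group_name plus a JSON data hash, which leaves room for metrics other than coverage. For a pipeline whose "rspec" build group averages 80% coverage, one element of the array handed to DailyBuildGroupReportResult.upsert_reports would look roughly like this (illustrative values only):

    {
      project_id: pipeline.project_id,
      ref_path: pipeline.source_ref_path,   # e.g. "refs/heads/master"
      date: pipeline.created_at.to_date,
      last_pipeline_id: pipeline.id,
      group_name: 'rspec',
      data: { 'coverage' => 80.0 }
    }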
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index 7d2f5d33fed..5deb84812ac 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -28,7 +28,13 @@ module Ci
private
def destroy_batch
- artifacts = Ci::JobArtifact.expired(BATCH_SIZE).to_a
+ artifact_batch = if Feature.enabled?(:keep_latest_artifact_for_ref)
+ Ci::JobArtifact.expired(BATCH_SIZE).unlocked
+ else
+ Ci::JobArtifact.expired(BATCH_SIZE)
+ end
+
+ artifacts = artifact_batch.to_a
return false if artifacts.empty?
diff --git a/app/services/ci/generate_terraform_reports_service.rb b/app/services/ci/generate_terraform_reports_service.rb
new file mode 100644
index 00000000000..d768ce777d4
--- /dev/null
+++ b/app/services/ci/generate_terraform_reports_service.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Ci
+  # TODO: a couple of points with this approach:
+  # + reuses existing architecture and reactive caching
+  # - it's not a report comparison and some comparing features must be turned off.
+  # see CompareReportsBaseService for more notes.
+  # issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
+  class GenerateTerraformReportsService < CompareReportsBaseService
+    def execute(base_pipeline, head_pipeline)
+      {
+        status: :parsed,
+        key: key(base_pipeline, head_pipeline),
+        data: head_pipeline.terraform_reports.plans
+      }
+    rescue => e
+      Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
+      {
+        status: :error,
+        key: key(base_pipeline, head_pipeline),
+        status_reason: _('An error occurred while fetching terraform reports.')
+      }
+    end
+
+    def latest?(base_pipeline, head_pipeline, data)
+      data&.fetch(:key, nil) == key(base_pipeline, head_pipeline)
+    end
+  end
+end
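
Unlike the comparison services, this one ignores the base pipeline's report contents and only uses it, together with the head pipeline, to build the reactive-caching key; the data comes straight from head_pipeline.terraform_reports.plans. A usage sketch, not part of this commit, assuming the same entry point as the other CompareReportsBaseService subclasses:

    # Hypothetical invocation; project, user and the pipelines are assumed in scope.
    result = Ci::GenerateTerraformReportsService
      .new(project, user)
      .execute(base_pipeline, head_pipeline)

    result[:status]  # :parsed on success, :error if fetching the plans raised
    result[:data]    # the terraform plan reports of the head pipeline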
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service.rb b/app/services/ci/pipeline_processing/atomic_processing_service.rb
index 2a1bf15b9a3..b01a9d2e3b8 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service.rb
@@ -95,7 +95,7 @@ module Ci
def processable_status(processable)
if processable.scheduling_type_dag?
# Processable uses DAG, get status of all dependent needs
- @collection.status_for_names(processable.aggregated_needs_names.to_a)
+ @collection.status_for_names(processable.aggregated_needs_names.to_a, dag: true)
else
# Processable uses Stages, get status of prior stage
@collection.status_for_prior_stage_position(processable.stage_idx.to_i)
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
index 42e38a5c80f..2228328882d 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -32,14 +32,14 @@ module Ci
# This methods gets composite status of all processables
def status_of_all
- status_for_array(all_statuses)
+ status_for_array(all_statuses, dag: false)
end
# This methods gets composite status for processables with given names
- def status_for_names(names)
+ def status_for_names(names, dag:)
name_statuses = all_statuses_by_name.slice(*names)
- status_for_array(name_statuses.values)
+ status_for_array(name_statuses.values, dag: dag)
end
# This methods gets composite status for processables before given stage
@@ -48,7 +48,7 @@ module Ci
stage_statuses = all_statuses_grouped_by_stage_position
.select { |stage_position, _| stage_position < position }
- status_for_array(stage_statuses.values.flatten)
+ status_for_array(stage_statuses.values.flatten, dag: false)
end
end
@@ -65,7 +65,7 @@ module Ci
strong_memoize("status_for_stage_position_#{current_position}") do
stage_statuses = all_statuses_grouped_by_stage_position[current_position].to_a
- status_for_array(stage_statuses.flatten)
+ status_for_array(stage_statuses.flatten, dag: false)
end
end
@@ -76,7 +76,14 @@ module Ci
private
- def status_for_array(statuses)
+ def status_for_array(statuses, dag:)
+ # TODO: This is hack to support
+ # the same exact behaviour for Atomic and Legacy processing
+ # that DAG is blocked from executing if dependent is not "complete"
+ if dag && statuses.any? { |status| HasStatus::COMPLETED_STATUSES.exclude?(status[:status]) }
+ return 'pending'
+ end
+
result = Gitlab::Ci::Status::Composite
.new(statuses)
.status
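
The new dag: keyword short-circuits the composite status: when any needed job has not reached a completed status, the DAG job is reported as pending instead of whatever Gitlab::Ci::Status::Composite would otherwise return. A small illustration, not part of this commit, with status hashes shaped only loosely like the ones the collection stores:

    statuses = [
      { name: 'build', status: 'success' },
      { name: 'test',  status: 'running' }  # not in HasStatus::COMPLETED_STATUSES
    ]

    # status_for_array(statuses, dag: true)   # => 'pending' (a need is still running)
    # status_for_array(statuses, dag: false)  # => the composite status, e.g. 'running'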
diff --git a/app/services/ci/pipeline_schedule_service.rb b/app/services/ci/pipeline_schedule_service.rb
index 6028643489d..596c3b80bda 100644
--- a/app/services/ci/pipeline_schedule_service.rb
+++ b/app/services/ci/pipeline_schedule_service.rb
@@ -6,19 +6,7 @@ module Ci
# Ensure `next_run_at` is set properly before creating a pipeline.
# Otherwise, multiple pipelines could be created in a short interval.
schedule.schedule_next_run!
-
- if Feature.enabled?(:ci_pipeline_schedule_async)
- RunPipelineScheduleWorker.perform_async(schedule.id, schedule.owner&.id)
- else
- begin
- RunPipelineScheduleWorker.new.perform(schedule.id, schedule.owner&.id)
- ensure
- ##
- # This is the temporary solution for avoiding the memory bloat.
- # See more https://gitlab.com/gitlab-org/gitlab-foss/issues/61955
- GC.start if Feature.enabled?(:ci_pipeline_schedule_force_gc, default_enabled: true)
- end
- end
+ RunPipelineScheduleWorker.perform_async(schedule.id, schedule.owner&.id)
end
end
end
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index d1efa19eb0d..3f23e81dcdd 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -10,7 +10,6 @@ module Ci
def execute(trigger_build_ids = nil, initial_process: false)
update_retried
- ensure_scheduling_type_for_processables
if Feature.enabled?(:ci_atomic_processing, pipeline.project)
Ci::PipelineProcessing::AtomicProcessingService
@@ -44,17 +43,5 @@ module Ci
.update_all(retried: true) if latest_statuses.any?
end
# rubocop: enable CodeReuse/ActiveRecord
-
- # Set scheduling type of processables if they were created before scheduling_type
- # data was deployed (https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22246).
- # Given that this service runs multiple times during the pipeline
- # life cycle we need to ensure we populate the data once.
- # See more: https://gitlab.com/gitlab-org/gitlab/issues/205426
- def ensure_scheduling_type_for_processables
- lease = Gitlab::ExclusiveLease.new("set-scheduling-types:#{pipeline.id}", timeout: 1.hour.to_i)
- return unless lease.try_obtain
-
- pipeline.processables.populate_scheduling_type!
- end
end
end
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index fb59797a8df..17b9e56636b 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -85,8 +85,6 @@ module Ci
# to make sure that this is properly handled by runner.
Result.new(nil, false)
rescue => ex
- raise ex unless Feature.enabled?(:ci_doom_build, default_enabled: true)
-
scheduler_failure!(build)
track_exception_for_build(ex, build)
@@ -203,7 +201,7 @@ module Ci
labels[:shard] = shard.gsub(METRICS_SHARD_TAG_PREFIX, '') if shard
end
- job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil?
+ job_queue_duration_seconds.observe(labels, Time.current - job.queued_at) unless job.queued_at.nil?
attempt_counter.increment
end
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index a65fe2ecb3a..23507a31c72 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -9,6 +9,8 @@ module Ci
resource_group scheduling_type].freeze
def execute(build)
+ build.ensure_scheduling_type!
+
reprocess!(build).tap do |new_build|
build.pipeline.mark_as_processable_after_stage(build.stage_idx)
@@ -31,6 +33,9 @@ module Ci
end.to_h
attributes[:user] = current_user
+
+ # TODO: we can probably remove this logic
+ # see: https://gitlab.com/gitlab-org/gitlab/-/issues/217930
attributes[:scheduling_type] ||= build.find_legacy_scheduling_type
Ci::Build.transaction do
diff --git a/app/services/ci/retry_pipeline_service.rb b/app/services/ci/retry_pipeline_service.rb
index 9bb236ac44c..4229be6c7d7 100644
--- a/app/services/ci/retry_pipeline_service.rb
+++ b/app/services/ci/retry_pipeline_service.rb
@@ -11,6 +11,8 @@ module Ci
needs = Set.new
+ pipeline.ensure_scheduling_type!
+
pipeline.retryable_builds.preload_needs.find_each do |build|
next unless can?(current_user, :update_build, build)
diff --git a/app/services/ci/update_instance_variables_service.rb b/app/services/ci/update_instance_variables_service.rb
new file mode 100644
index 00000000000..ee513647d08
--- /dev/null
+++ b/app/services/ci/update_instance_variables_service.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+# This class is a simplified version of assign_nested_attributes_for_collection_association from ActiveRecord
+# https://github.com/rails/rails/blob/v6.0.2.1/activerecord/lib/active_record/nested_attributes.rb#L466
+
+module Ci
+  class UpdateInstanceVariablesService
+    UNASSIGNABLE_KEYS = %w(id _destroy).freeze
+
+    def initialize(params)
+      @params = params[:variables_attributes]
+    end
+
+    def execute
+      instantiate_records
+      persist_records
+    end
+
+    def errors
+      @records.to_a.flat_map { |r| r.errors.full_messages }
+    end
+
+    private
+
+    attr_reader :params
+
+    def existing_records_by_id
+      @existing_records_by_id ||= Ci::InstanceVariable
+        .all
+        .index_by { |var| var.id.to_s }
+    end
+
+    def instantiate_records
+      @records = params.map do |attributes|
+        find_or_initialize_record(attributes).tap do |record|
+          record.assign_attributes(attributes.except(*UNASSIGNABLE_KEYS))
+          record.mark_for_destruction if has_destroy_flag?(attributes)
+        end
+      end
+    end
+
+    def find_or_initialize_record(attributes)
+      id = attributes[:id].to_s
+
+      if id.blank?
+        Ci::InstanceVariable.new
+      else
+        existing_records_by_id.fetch(id) { raise ActiveRecord::RecordNotFound }
+      end
+    end
+
+    def persist_records
+      Ci::InstanceVariable.transaction do
+        success = @records.map do |record|
+          if record.marked_for_destruction?
+            record.destroy
+          else
+            record.save
+          end
+        end.all?
+
+        raise ActiveRecord::Rollback unless success
+
+        success
+      end
+    end
+
+    def has_destroy_flag?(hash)
+      Gitlab::Utils.to_boolean(hash['_destroy'])
+    end
+  end
+end
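
The new service is a trimmed-down version of Rails' nested attributes assignment for instance-level CI variables: entries with an id update an existing record, entries without one build a new record, entries carrying _destroy remove one, and everything runs in a single transaction that rolls back if any record fails. A usage sketch, not part of this commit (attribute names such as key and secret_value are assumptions about Ci::InstanceVariable, and the params are expected to behave like ActionController::Parameters, which is what the string-keyed '_destroy' lookup relies on):

    params = ActionController::Parameters.new(
      variables_attributes: [
        { key: 'NEW_VAR', secret_value: 'value' },  # no id    -> created
        { id: '42', secret_value: 'rotated' },      # id       -> updated
        { id: '43', _destroy: 'true' }              # _destroy -> removed
      ]
    ).permit!

    service = Ci::UpdateInstanceVariablesService.new(params)

    if service.execute
      # every record was saved or destroyed inside one transaction
    else
      service.errors  # validation messages collected across all records
    end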