gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'app/models/ci')
-rw-r--r--  app/models/ci/bridge.rb                   |   6
-rw-r--r--  app/models/ci/build.rb                    |  55
-rw-r--r--  app/models/ci/build_dependencies.rb       |  82
-rw-r--r--  app/models/ci/build_trace_chunks/fog.rb   |  29
-rw-r--r--  app/models/ci/job_artifact.rb             |   8
-rw-r--r--  app/models/ci/pipeline.rb                 | 130
6 files changed, 277 insertions(+), 33 deletions(-)
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index 5b23cf46fdb..19a0d424e33 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -132,14 +132,10 @@ module Ci
end
def playable?
- return false unless ::Gitlab::Ci::Features.manual_bridges_enabled?(project)
-
action? && !archived? && manual?
end
def action?
- return false unless ::Gitlab::Ci::Features.manual_bridges_enabled?(project)
-
%w[manual].include?(self.when)
end
@@ -206,7 +202,7 @@ module Ci
override :dependency_variables
def dependency_variables
- return [] unless ::Feature.enabled?(:ci_bridge_dependency_variables, project)
+ return [] unless ::Feature.enabled?(:ci_bridge_dependency_variables, project, default_enabled: true)
super
end
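
The bridge.rb change above removes the manual_bridges feature-flag guard from playable?/action? and flips ci_bridge_dependency_variables to default-enabled. A minimal sketch of what that default flip means for callers, using an illustrative flag store rather than the real Feature API:

# Illustrative flag store (NOT the real Feature API): with no explicit flag
# record the `default_enabled:` value wins, while an explicit opt-out still
# disables the feature.
class FlagStore
  def initialize(overrides = {})
    @overrides = overrides
  end

  def enabled?(name, default_enabled: false)
    @overrides.fetch(name, default_enabled)
  end
end

FlagStore.new.enabled?(:ci_bridge_dependency_variables)
# => false (old default)
FlagStore.new.enabled?(:ci_bridge_dependency_variables, default_enabled: true)
# => true (new default in this change)
FlagStore.new(ci_bridge_dependency_variables: false)
  .enabled?(:ci_bridge_dependency_variables, default_enabled: true)
# => false (explicit opt-out still wins)
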
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 84abd01786d..71939f070cb 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -190,6 +190,8 @@ module Ci
scope :with_coverage, -> { where.not(coverage: nil) }
+ scope :for_project, -> (project_id) { where(project_id: project_id) }
+
acts_as_taggable
add_authentication_token_field :token, encrypted: :optional
@@ -379,8 +381,16 @@ module Ci
Ci::BuildRunnerSession.where(build: build).delete_all
end
- after_transition any => [:skipped, :canceled] do |build|
- build.deployment&.cancel
+ after_transition any => [:skipped, :canceled] do |build, transition|
+ if Feature.enabled?(:cd_skipped_deployment_status, build.project)
+ if transition.to_name == :skipped
+ build.deployment&.skip
+ else
+ build.deployment&.cancel
+ end
+ else
+ build.deployment&.cancel
+ end
end
end
@@ -527,6 +537,7 @@ module Ci
strong_memoize(:variables) do
Gitlab::Ci::Variables::Collection.new
.concat(persisted_variables)
+ .concat(dependency_proxy_variables)
.concat(job_jwt_variables)
.concat(scoped_variables)
.concat(job_variables)
@@ -575,6 +586,15 @@ module Ci
end
end
+ def dependency_proxy_variables
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ break variables unless Gitlab.config.dependency_proxy.enabled
+
+ variables.append(key: 'CI_DEPENDENCY_PROXY_USER', value: ::Gitlab::Auth::CI_JOB_USER)
+ variables.append(key: 'CI_DEPENDENCY_PROXY_PASSWORD', value: token.to_s, public: false, masked: true)
+ end
+ end
+
def features
{ trace_sections: true }
end
@@ -908,13 +928,33 @@ module Ci
end
def collect_coverage_reports!(coverage_report)
+ project_path, worktree_paths = if Feature.enabled?(:smart_cobertura_parser, project)
+ # If the flag is disabled, we intentionally pass nil
+ # for both project_path and worktree_paths to fallback
+ # to the non-smart behavior of the parser
+ [project.full_path, pipeline.all_worktree_paths]
+ end
+
each_report(Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES) do |file_type, blob|
- Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, coverage_report)
+ Gitlab::Ci::Parsers.fabricate!(file_type).parse!(
+ blob,
+ coverage_report,
+ project_path: project_path,
+ worktree_paths: worktree_paths
+ )
end
coverage_report
end
+ def collect_codequality_reports!(codequality_report)
+ each_report(Ci::JobArtifact::CODEQUALITY_REPORT_FILE_TYPES) do |file_type, blob|
+ Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, codequality_report)
+ end
+
+ codequality_report
+ end
+
def collect_terraform_reports!(terraform_reports)
each_report(::Ci::JobArtifact::TERRAFORM_REPORT_FILE_TYPES) do |file_type, blob, report_artifact|
::Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, terraform_reports, artifact: report_artifact)
@@ -966,6 +1006,15 @@ module Ci
::Gitlab.com? ? 500_000 : 0
end
+ def debug_mode?
+ return false unless Feature.enabled?(:restrict_access_to_build_debug_mode, default_enabled: true)
+
+ # TODO: Have `debug_mode?` check against data sent back from the runner
+ # to capture all the ways that variables can be set.
+ # See (https://gitlab.com/gitlab-org/gitlab/-/issues/290955)
+ variables.any? { |variable| variable[:key] == 'CI_DEBUG_TRACE' && variable[:value].casecmp('true') == 0 }
+ end
+
protected
def run_status_commit_hooks!
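
Among the build.rb additions, debug_mode? reports whether the job ran with CI_DEBUG_TRACE enabled (used behind the restrict_access_to_build_debug_mode flag); as the TODO notes, it only sees variables known on the Rails side, not values set directly on the runner. An illustrative stand-in for that check, using plain hashes instead of the real variables collection:

# Illustrative stand-in for the CI_DEBUG_TRACE check in `debug_mode?`,
# operating on plain hashes instead of the real variables collection.
def debug_trace_enabled?(variables)
  variables.any? do |variable|
    variable[:key] == 'CI_DEBUG_TRACE' && variable[:value].to_s.casecmp('true') == 0
  end
end

debug_trace_enabled?([{ key: 'CI_DEBUG_TRACE', value: 'True' }])  # => true (case-insensitive)
debug_trace_enabled?([{ key: 'CI_DEBUG_TRACE', value: 'false' }]) # => false
debug_trace_enabled?([{ key: 'OTHER_VAR', value: 'true' }])       # => false
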
diff --git a/app/models/ci/build_dependencies.rb b/app/models/ci/build_dependencies.rb
index 2fcd1708cf4..a6abeb517c1 100644
--- a/app/models/ci/build_dependencies.rb
+++ b/app/models/ci/build_dependencies.rb
@@ -2,6 +2,8 @@
module Ci
class BuildDependencies
+ include ::Gitlab::Utils::StrongMemoize
+
attr_reader :processable
def initialize(processable)
@@ -9,7 +11,7 @@ module Ci
end
def all
- (local + cross_pipeline).uniq
+ (local + cross_pipeline + cross_project).uniq
end
# Dependencies local to the given pipeline
@@ -23,8 +25,16 @@ module Ci
deps
end
- # Dependencies that are defined in other pipelines
+ # Dependencies from the same parent-pipeline hierarchy excluding
+ # the current job's pipeline
def cross_pipeline
+ strong_memoize(:cross_pipeline) do
+ fetch_dependencies_in_hierarchy
+ end
+ end
+
+ # Dependencies that are defined by project and ref
+ def cross_project
[]
end
@@ -33,7 +43,7 @@ module Ci
end
def valid?
- valid_local? && valid_cross_pipeline?
+ valid_local? && valid_cross_pipeline? && valid_cross_project?
end
private
@@ -44,13 +54,61 @@ module Ci
::Ci::Build
end
+ def fetch_dependencies_in_hierarchy
+ deps_specifications = specified_cross_pipeline_dependencies
+ return [] if deps_specifications.empty?
+
+ deps_specifications = expand_variables_and_validate(deps_specifications)
+ jobs_in_pipeline_hierarchy(deps_specifications)
+ end
+
+ def jobs_in_pipeline_hierarchy(deps_specifications)
+ all_pipeline_ids = []
+ all_job_names = []
+
+ deps_specifications.each do |spec|
+ all_pipeline_ids << spec[:pipeline]
+ all_job_names << spec[:job]
+ end
+
+ model_class.latest.success
+ .in_pipelines(processable.pipeline.same_family_pipeline_ids)
+ .in_pipelines(all_pipeline_ids.uniq)
+ .by_name(all_job_names.uniq)
+ .select do |dependency|
+ # the query may not return exact matches pipeline-job, so we filter
+ # them separately.
+ deps_specifications.find do |spec|
+ spec[:pipeline] == dependency.pipeline_id &&
+ spec[:job] == dependency.name
+ end
+ end
+ end
+
+ def expand_variables_and_validate(specifications)
+ specifications.map do |spec|
+ pipeline = ExpandVariables.expand(spec[:pipeline].to_s, processable_variables).to_i
+ # current pipeline is not allowed because local dependencies
+ # should be used instead.
+ next if pipeline == processable.pipeline_id
+
+ job = ExpandVariables.expand(spec[:job], processable_variables)
+
+ { job: job, pipeline: pipeline }
+ end.compact
+ end
+
+ def valid_cross_pipeline?
+ cross_pipeline.size == specified_cross_pipeline_dependencies.size
+ end
+
def valid_local?
return true if Feature.enabled?(:ci_disable_validates_dependencies)
local.all?(&:valid_dependency?)
end
- def valid_cross_pipeline?
+ def valid_cross_project?
true
end
@@ -78,6 +136,22 @@ module Ci
scope.where(name: processable.options[:dependencies])
end
+
+ def processable_variables
+ -> { processable.simple_variables_without_dependencies }
+ end
+
+ def specified_cross_pipeline_dependencies
+ strong_memoize(:specified_cross_pipeline_dependencies) do
+ next [] unless Feature.enabled?(:ci_cross_pipeline_artifacts_download, processable.project, default_enabled: true)
+
+ specified_cross_dependencies.select { |dep| dep[:pipeline] && dep[:artifacts] }
+ end
+ end
+
+ def specified_cross_dependencies
+ Array(processable.options[:cross_dependencies])
+ end
end
end
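
The new cross-pipeline dependency resolution queries by the union of pipeline ids and job names within the pipeline family, then re-filters for exact (pipeline, job) pairs in Ruby. A self-contained sketch of that second pass, with illustrative structs in place of Ci::Build records:

# Illustrative-only sketch of the re-filtering in `jobs_in_pipeline_hierarchy`:
# the query is scoped by the union of pipeline ids and the union of job names,
# so it can return jobs from the right pipeline but with the wrong name (or
# vice versa); exact (pipeline, job) pairs are kept by a second pass in Ruby.
Spec = Struct.new(:pipeline, :job, keyword_init: true)
Job  = Struct.new(:pipeline_id, :name, keyword_init: true)

def exact_matches(jobs, specs)
  jobs.select do |job|
    specs.any? { |spec| spec.pipeline == job.pipeline_id && spec.job == job.name }
  end
end

specs = [Spec.new(pipeline: 100, job: 'build'), Spec.new(pipeline: 200, job: 'test')]
jobs  = [
  Job.new(pipeline_id: 100, name: 'build'), # exact pair, kept
  Job.new(pipeline_id: 100, name: 'test'),  # wrong pairing, dropped
  Job.new(pipeline_id: 200, name: 'test')   # exact pair, kept
]

exact_matches(jobs, specs).map { |job| [job.pipeline_id, job.name] }
# => [[100, "build"], [200, "test"]]
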
diff --git a/app/models/ci/build_trace_chunks/fog.rb b/app/models/ci/build_trace_chunks/fog.rb
index d3051e3dadc..27b579bf428 100644
--- a/app/models/ci/build_trace_chunks/fog.rb
+++ b/app/models/ci/build_trace_chunks/fog.rb
@@ -14,11 +14,15 @@ module Ci
end
def set_data(model, new_data)
- # TODO: Support AWS S3 server side encryption
- files.create({
- key: key(model),
- body: new_data
- })
+ if Feature.enabled?(:ci_live_trace_use_fog_attributes, default_enabled: true)
+ files.create(create_attributes(model, new_data))
+ else
+ # TODO: Support AWS S3 server side encryption
+ files.create({
+ key: key(model),
+ body: new_data
+ })
+ end
end
def append_data(model, new_data, offset)
@@ -57,6 +61,13 @@ module Ci
key_raw(model.build_id, model.chunk_index)
end
+ def create_attributes(model, new_data)
+ {
+ key: key(model),
+ body: new_data
+ }.merge(object_store_config.fog_attributes)
+ end
+
def key_raw(build_id, chunk_index)
"tmp/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log"
end
@@ -84,6 +95,14 @@ module Ci
def object_store
Gitlab.config.artifacts.object_store
end
+
+ def object_store_raw_config
+ object_store
+ end
+
+ def object_store_config
+ @object_store_config ||= ::ObjectStorage::Config.new(object_store_raw_config)
+ end
end
end
end
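
create_attributes merges the chunk key and body with provider attributes coming from the object storage configuration, which is how server-side encryption settings can reach Fog when the ci_live_trace_use_fog_attributes flag is on. A rough standalone sketch, with a made-up fog attribute value:

# Rough sketch (standalone, made-up config values) of what `create_attributes`
# produces: the chunk key/body hash merged with provider attributes taken from
# the object storage configuration, e.g. S3 server-side encryption headers.
def create_attributes(key, body, fog_attributes)
  { key: key, body: body }.merge(fog_attributes)
end

fog_attributes = { 'x-amz-server-side-encryption' => 'AES256' } # illustrative
create_attributes('tmp/builds/1/chunks/0.log', 'trace data', fog_attributes)
# => {:key=>"tmp/builds/1/chunks/0.log", :body=>"trace data",
#     "x-amz-server-side-encryption"=>"AES256"}
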
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 7cedd13b407..c80d50ea131 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -7,15 +7,13 @@ module Ci
include UpdateProjectStatistics
include UsageStatistics
include Sortable
- include IgnorableColumns
include Artifactable
include FileStoreMounter
extend Gitlab::Ci::Model
- ignore_columns :locked, remove_after: '2020-07-22', remove_with: '13.4'
-
TEST_REPORT_FILE_TYPES = %w[junit].freeze
COVERAGE_REPORT_FILE_TYPES = %w[cobertura].freeze
+ CODEQUALITY_REPORT_FILE_TYPES = %w[codequality].freeze
ACCESSIBILITY_REPORT_FILE_TYPES = %w[accessibility].freeze
NON_ERASABLE_FILE_TYPES = %w[trace].freeze
TERRAFORM_REPORT_FILE_TYPES = %w[terraform].freeze
@@ -157,6 +155,10 @@ module Ci
with_file_types(COVERAGE_REPORT_FILE_TYPES)
end
+ scope :codequality_reports, -> do
+ with_file_types(CODEQUALITY_REPORT_FILE_TYPES)
+ end
+
scope :terraform_reports, -> do
with_file_types(TERRAFORM_REPORT_FILE_TYPES)
end
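
The job_artifact.rb change introduces the codequality file type alongside a matching scope. A toy illustration of what that scope selects, using plain hashes instead of an ActiveRecord relation:

# Toy illustration of the new `codequality_reports` scope: it selects job
# artifacts whose file_type is one of CODEQUALITY_REPORT_FILE_TYPES.
CODEQUALITY_REPORT_FILE_TYPES = %w[codequality].freeze

artifacts = [
  { id: 1, file_type: 'codequality' },
  { id: 2, file_type: 'junit' },
  { id: 3, file_type: 'cobertura' }
]

artifacts.select { |artifact| CODEQUALITY_REPORT_FILE_TYPES.include?(artifact[:file_type]) }
# => [{:id=>1, :file_type=>"codequality"}]
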
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 8707d635e03..5e5f51d776f 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -7,6 +7,7 @@ module Ci
include Importable
include AfterCommitQueue
include Presentable
+ include Gitlab::Allowable
include Gitlab::OptimisticLocking
include Gitlab::Utils::StrongMemoize
include AtomicInternalId
@@ -16,6 +17,8 @@ module Ci
include FromUnion
include UpdatedAtFilterable
+ MAX_OPEN_MERGE_REQUESTS_REFS = 4
+
PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
project: [:project_feature, :route, { namespace: :route }]
}.freeze
@@ -104,7 +107,6 @@ module Ci
accepts_nested_attributes_for :variables, reject_if: :persisted?
- delegate :id, to: :project, prefix: true
delegate :full_path, to: :project, prefix: true
validates :sha, presence: { unless: :importing? }
@@ -259,6 +261,22 @@ module Ci
end
end
+ after_transition any => any do |pipeline|
+ next unless Feature.enabled?(:jira_sync_builds, pipeline.project)
+
+ pipeline.run_after_commit do
+ # Passing the seq-id ensures this is idempotent
+ seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
+ ::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, seq_id)
+ end
+ end
+
+ after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
+ pipeline.run_after_commit do
+ ::Ci::TestFailureHistoryService.new(pipeline).async.perform_if_needed # rubocop: disable CodeReuse/ServiceClass
+ end
+ end
+
after_transition any => [:success, :failed] do |pipeline|
ref_status = pipeline.ci_ref&.update_status_by!(pipeline)
@@ -277,15 +295,17 @@ module Ci
scope :internal, -> { where(source: internal_sources) }
scope :no_child, -> { where.not(source: :parent_pipeline) }
scope :ci_sources, -> { where(source: Enums::Ci::Pipeline.ci_sources.values) }
+ scope :ci_branch_sources, -> { where(source: Enums::Ci::Pipeline.ci_branch_sources.values) }
scope :ci_and_parent_sources, -> { where(source: Enums::Ci::Pipeline.ci_and_parent_sources.values) }
scope :for_user, -> (user) { where(user: user) }
scope :for_sha, -> (sha) { where(sha: sha) }
scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
scope :for_sha_or_source_sha, -> (sha) { for_sha(sha).or(for_source_sha(sha)) }
scope :for_ref, -> (ref) { where(ref: ref) }
+ scope :for_branch, -> (branch) { for_ref(branch).where(tag: false) }
scope :for_id, -> (id) { where(id: id) }
scope :for_iid, -> (iid) { where(iid: iid) }
- scope :for_project, -> (project) { where(project: project) }
+ scope :for_project, -> (project_id) { where(project_id: project_id) }
scope :created_after, -> (time) { where('ci_pipelines.created_at > ?', time) }
scope :created_before_id, -> (id) { where('ci_pipelines.id < ?', id) }
scope :before_pipeline, -> (pipeline) { created_before_id(pipeline.id).outside_pipeline_family(pipeline) }
@@ -310,9 +330,9 @@ module Ci
# In general, please use `Ci::PipelinesForMergeRequestFinder` instead,
# for checking permission of the actor.
scope :triggered_by_merge_request, -> (merge_request) do
- ci_sources.where(source: :merge_request_event,
- merge_request: merge_request,
- project: [merge_request.source_project, merge_request.target_project])
+ where(source: :merge_request_event,
+ merge_request: merge_request,
+ project: [merge_request.source_project, merge_request.target_project])
end
# Returns the pipelines in descending order (= newest first), optionally
@@ -774,9 +794,20 @@ module Ci
variables.append(key: 'CI_MERGE_REQUEST_EVENT_TYPE', value: merge_request_event_type.to_s)
variables.append(key: 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA', value: source_sha.to_s)
variables.append(key: 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA', value: target_sha.to_s)
+
+ diff = self.merge_request_diff
+ if diff.present?
+ variables.append(key: 'CI_MERGE_REQUEST_DIFF_ID', value: diff.id.to_s)
+ variables.append(key: 'CI_MERGE_REQUEST_DIFF_BASE_SHA', value: diff.base_commit_sha)
+ end
+
variables.concat(merge_request.predefined_variables)
end
+ if Gitlab::Ci::Features.pipeline_open_merge_requests?(project) && open_merge_requests_refs.any?
+ variables.append(key: 'CI_OPEN_MERGE_REQUESTS', value: open_merge_requests_refs.join(','))
+ end
+
variables.append(key: 'CI_KUBERNETES_ACTIVE', value: 'true') if has_kubernetes_active?
variables.append(key: 'CI_DEPLOY_FREEZE', value: 'true') if freeze_period?
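
The predefined-variables hunk above adds CI_MERGE_REQUEST_DIFF_ID and CI_MERGE_REQUEST_DIFF_BASE_SHA when a diff is present and, behind Gitlab::Ci::Features.pipeline_open_merge_requests?, a CI_OPEN_MERGE_REQUESTS variable listing up to MAX_OPEN_MERGE_REQUESTS_REFS full merge request references. A hedged sketch of how that value is assembled (the reference strings are illustrative):

# Sketch of the CI_OPEN_MERGE_REQUESTS value: up to MAX_OPEN_MERGE_REQUESTS_REFS
# full merge request references joined with commas.
MAX_OPEN_MERGE_REQUESTS_REFS = 4

open_refs = [
  'gitlab-org/gitlab!123',
  'gitlab-org/security/gitlab!45'
]

variables = []
variables << { key: 'CI_OPEN_MERGE_REQUESTS', value: open_refs.first(MAX_OPEN_MERGE_REQUESTS_REFS).join(',') } if open_refs.any?
variables
# => [{:key=>"CI_OPEN_MERGE_REQUESTS",
#      :value=>"gitlab-org/gitlab!123,gitlab-org/security/gitlab!45"}]
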
@@ -824,9 +855,8 @@ module Ci
end
def execute_hooks
- data = pipeline_data
- project.execute_hooks(data, :pipeline_hooks)
- project.execute_services(data, :pipeline_hooks)
+ project.execute_hooks(pipeline_data, :pipeline_hooks) if project.has_active_hooks?(:pipeline_hooks)
+ project.execute_services(pipeline_data, :pipeline_hooks) if project.has_active_services?(:pipeline_hooks)
end
# All the merge requests for which the current pipeline runs/ran against
@@ -844,9 +874,39 @@ module Ci
all_merge_requests.order(id: :desc)
end
+ # This returns a list of MRs that point
+ # to the same source project/branch
+ def related_merge_requests
+ if merge_request?
+ # We look for all other MRs that this branch might be pointing to
+ MergeRequest.where(
+ source_project_id: merge_request.source_project_id,
+ source_branch: merge_request.source_branch)
+ else
+ MergeRequest.where(
+ source_project_id: project_id,
+ source_branch: ref)
+ end
+ end
+
+ # We cannot use `all_merge_requests`, due to race condition
+ # This returns a list of at most 4 open MRs
+ def open_merge_requests_refs
+ strong_memoize(:open_merge_requests_refs) do
+ # We ensure that triggering user can actually read the pipeline
+ related_merge_requests
+ .opened
+ .limit(MAX_OPEN_MERGE_REQUESTS_REFS)
+ .order(id: :desc)
+ .preload(:target_project)
+ .select { |mr| can?(user, :read_merge_request, mr) }
+ .map { |mr| mr.to_reference(project, full: true) }
+ end
+ end
+
def same_family_pipeline_ids
::Gitlab::Ci::PipelineObjectHierarchy.new(
- base_and_ancestors(same_project: true), options: { same_project: true }
+ self.class.where(id: root_ancestor), options: { same_project: true }
).base_and_descendants.select(:id)
end
@@ -869,6 +929,15 @@ module Ci
.base_and_descendants
end
+ def root_ancestor
+ return self unless child?
+
+ Gitlab::Ci::PipelineObjectHierarchy
+ .new(self.class.unscoped.where(id: id), options: { same_project: true })
+ .base_and_ancestors(hierarchy_order: :desc)
+ .first
+ end
+
def bridge_triggered?
source_bridge.present?
end
@@ -878,7 +947,8 @@ module Ci
end
def child?
- parent_pipeline.present?
+ parent_pipeline? && # child pipelines have `parent_pipeline` source
+ parent_pipeline.present?
end
def parent?
@@ -910,10 +980,18 @@ module Ci
builds.latest.with_reports(reports_scope)
end
+ def latest_test_report_builds
+ latest_report_builds(Ci::JobArtifact.test_reports).preload(:project)
+ end
+
def builds_with_coverage
builds.latest.with_coverage
end
+ def builds_with_failed_tests(limit: nil)
+ latest_test_report_builds.failed.limit(limit)
+ end
+
def has_reports?(reports_scope)
complete? && latest_report_builds(reports_scope).exists?
end
@@ -934,7 +1012,7 @@ module Ci
def test_reports
Gitlab::Ci::Reports::TestReports.new.tap do |test_reports|
- latest_report_builds(Ci::JobArtifact.test_reports).preload(:project).find_each do |build|
+ latest_test_report_builds.find_each do |build|
build.collect_test_reports!(test_reports)
end
end
@@ -950,12 +1028,20 @@ module Ci
def coverage_reports
Gitlab::Ci::Reports::CoverageReports.new.tap do |coverage_reports|
- latest_report_builds(Ci::JobArtifact.coverage_reports).each do |build|
+ latest_report_builds(Ci::JobArtifact.coverage_reports).includes(:project).find_each do |build|
build.collect_coverage_reports!(coverage_reports)
end
end
end
+ def codequality_reports
+ Gitlab::Ci::Reports::CodequalityReports.new.tap do |codequality_reports|
+ latest_report_builds(Ci::JobArtifact.codequality_reports).each do |build|
+ build.collect_codequality_reports!(codequality_reports)
+ end
+ end
+ end
+
def terraform_reports
::Gitlab::Ci::Reports::TerraformReports.new.tap do |terraform_reports|
latest_report_builds(::Ci::JobArtifact.terraform_reports).each do |build|
@@ -1128,7 +1214,25 @@ module Ci
end
def pipeline_data
- Gitlab::DataBuilder::Pipeline.build(self)
+ strong_memoize(:pipeline_data) do
+ Gitlab::DataBuilder::Pipeline.build(self)
+ end
+ end
+
+ def merge_request_diff_sha
+ return unless merge_request?
+
+ if merge_request_pipeline?
+ source_sha
+ else
+ sha
+ end
+ end
+
+ def merge_request_diff
+ return unless merge_request?
+
+ merge_request.merge_request_diff_for(merge_request_diff_sha)
end
def push_details
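
merge_request_diff_sha picks which sha to use when looking up the merge request diff: merged-results pipelines (merge_request_pipeline?) run against a merge commit, so the source branch sha is used instead of the pipeline sha. A plain-Ruby sketch of that selection, with the predicate passed in as a parameter:

# Plain-Ruby sketch of the sha selection in the new `merge_request_diff_sha`;
# `merge_request_pipeline:` stands in for the model's `merge_request_pipeline?`
# predicate (true for merged-results pipelines).
def merge_request_diff_sha(merge_request_pipeline:, source_sha:, sha:)
  merge_request_pipeline ? source_sha : sha
end

merge_request_diff_sha(merge_request_pipeline: true,  source_sha: 'a1b2c3', sha: 'd4e5f6')
# => "a1b2c3" (source branch sha)
merge_request_diff_sha(merge_request_pipeline: false, source_sha: 'a1b2c3', sha: 'd4e5f6')
# => "d4e5f6" (pipeline sha)
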