Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
Diffstat (limited to 'app/services')
-rw-r--r--app/services/alert_management/alerts/update_service.rb26
-rw-r--r--app/services/audit_event_service.rb7
-rw-r--r--app/services/auto_merge/base_service.rb15
-rw-r--r--app/services/bulk_imports/create_pipeline_trackers_service.rb2
-rw-r--r--app/services/ci/build_report_result_service.rb2
-rw-r--r--app/services/ci/external_pull_requests/create_pipeline_service.rb14
-rw-r--r--app/services/ci/generate_coverage_reports_service.rb13
-rw-r--r--app/services/ci/job_artifacts/create_service.rb15
-rw-r--r--app/services/ci/job_artifacts/destroy_batch_service.rb10
-rw-r--r--app/services/ci/pipeline_artifacts/coverage_report_service.rb30
-rw-r--r--app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb2
-rw-r--r--app/services/ci/queue/build_queue_service.rb26
-rw-r--r--app/services/ci/queue/builds_table_strategy.rb75
-rw-r--r--app/services/ci/queue/pending_builds_strategy.rb27
-rw-r--r--app/services/ci/runners/reconcile_existing_runner_versions_service.rb90
-rw-r--r--app/services/ci/runners/register_runner_service.rb14
-rw-r--r--app/services/ci/test_failure_history_service.rb2
-rw-r--r--app/services/ci/update_build_queue_service.rb10
-rw-r--r--app/services/ci/update_pending_build_service.rb2
-rw-r--r--app/services/clusters/integrations/create_service.rb2
-rw-r--r--app/services/concerns/alert_management/alert_processing.rb18
-rw-r--r--app/services/concerns/integrations/project_test_data.rb2
-rw-r--r--app/services/concerns/work_items/widgetable_service.rb26
-rw-r--r--app/services/deployments/create_for_build_service.rb12
-rw-r--r--app/services/error_tracking/list_issues_service.rb1
-rw-r--r--app/services/event_create_service.rb37
-rw-r--r--app/services/feature_flags/base_service.rb4
-rw-r--r--app/services/feature_flags/create_service.rb2
-rw-r--r--app/services/feature_flags/destroy_service.rb2
-rw-r--r--app/services/feature_flags/update_service.rb2
-rw-r--r--app/services/git/branch_hooks_service.rb8
-rw-r--r--app/services/git/branch_push_service.rb4
-rw-r--r--app/services/google_cloud/base_service.rb65
-rw-r--r--app/services/google_cloud/create_service_accounts_service.rb18
-rw-r--r--app/services/google_cloud/enable_cloud_run_service.rb15
-rw-r--r--app/services/google_cloud/gcp_region_add_or_replace_service.rb4
-rw-r--r--app/services/google_cloud/generate_pipeline_service.rb2
-rw-r--r--app/services/google_cloud/service_accounts_service.rb36
-rw-r--r--app/services/google_cloud/setup_cloudsql_instance_service.rb74
-rw-r--r--app/services/gravatar_service.rb1
-rw-r--r--app/services/groups/base_service.rb4
-rw-r--r--app/services/groups/create_service.rb2
-rw-r--r--app/services/groups/group_links/destroy_service.rb2
-rw-r--r--app/services/groups/transfer_service.rb6
-rw-r--r--app/services/incident_management/issuable_escalation_statuses/after_update_service.rb22
-rw-r--r--app/services/incident_management/issuable_escalation_statuses/build_service.rb17
-rw-r--r--app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb2
-rw-r--r--app/services/incident_management/timeline_events/create_service.rb48
-rw-r--r--app/services/incident_management/timeline_events/update_service.rb5
-rw-r--r--app/services/issuable/clone/attributes_rewriter.rb126
-rw-r--r--app/services/issuable/clone/base_service.rb40
-rw-r--r--app/services/issuable/import_csv/base_service.rb3
-rw-r--r--app/services/issuable_base_service.rb23
-rw-r--r--app/services/issuable_links/create_service.rb2
-rw-r--r--app/services/issues/build_service.rb15
-rw-r--r--app/services/issues/clone_service.rb17
-rw-r--r--app/services/issues/close_service.rb5
-rw-r--r--app/services/issues/create_service.rb21
-rw-r--r--app/services/issues/move_service.rb1
-rw-r--r--app/services/issues/related_branches_service.rb33
-rw-r--r--app/services/issues/reopen_service.rb7
-rw-r--r--app/services/issues/update_service.rb3
-rw-r--r--app/services/members/create_service.rb2
-rw-r--r--app/services/members/creator_service.rb98
-rw-r--r--app/services/members/invite_member_builder.rb19
-rw-r--r--app/services/members/invite_service.rb4
-rw-r--r--app/services/members/projects/creator_service.rb2
-rw-r--r--app/services/members/standard_member_builder.rb23
-rw-r--r--app/services/merge_requests/approval_service.rb2
-rw-r--r--app/services/merge_requests/base_service.rb12
-rw-r--r--app/services/merge_requests/create_pipeline_service.rb8
-rw-r--r--app/services/namespaces/in_product_marketing_emails_service.rb5
-rw-r--r--app/services/notification_recipients/builder/base.rb10
-rw-r--r--app/services/packages/cleanup/execute_policy_service.rb98
-rw-r--r--app/services/packages/debian/create_package_file_service.rb3
-rw-r--r--app/services/packages/debian/extract_changes_metadata_service.rb3
-rw-r--r--app/services/packages/debian/generate_distribution_service.rb3
-rw-r--r--app/services/packages/mark_package_files_for_destruction_service.rb35
-rw-r--r--app/services/packages/pypi/create_package_service.rb2
-rw-r--r--app/services/pages/delete_service.rb3
-rw-r--r--app/services/pod_logs/base_service.rb91
-rw-r--r--app/services/pod_logs/elasticsearch_service.rb98
-rw-r--r--app/services/pod_logs/kubernetes_service.rb151
-rw-r--r--app/services/preview_markdown_service.rb2
-rw-r--r--app/services/projects/after_rename_service.rb13
-rw-r--r--app/services/projects/blame_service.rb9
-rw-r--r--app/services/projects/create_service.rb12
-rw-r--r--app/services/projects/destroy_service.rb41
-rw-r--r--app/services/projects/fork_service.rb16
-rw-r--r--app/services/projects/group_links/update_service.rb2
-rw-r--r--app/services/projects/move_deploy_keys_projects_service.rb11
-rw-r--r--app/services/projects/operations/update_service.rb10
-rw-r--r--app/services/projects/update_pages_service.rb143
-rw-r--r--app/services/projects/update_service.rb16
-rw-r--r--app/services/protected_branches/api_service.rb4
-rw-r--r--app/services/protected_refs/access_level_params.rb (renamed from app/services/protected_branches/access_level_params.rb)4
-rw-r--r--app/services/quick_actions/interpret_service.rb46
-rw-r--r--app/services/repositories/changelog_service.rb7
-rw-r--r--app/services/resource_access_tokens/create_service.rb2
-rw-r--r--app/services/search_service.rb17
-rw-r--r--app/services/service_ping/submit_service.rb13
-rw-r--r--app/services/system_notes/incidents_service.rb8
-rw-r--r--app/services/terraform/states/trigger_destroy_service.rb6
-rw-r--r--app/services/users/activity_service.rb2
-rw-r--r--app/services/web_hook_service.rb17
-rw-r--r--app/services/web_hooks/destroy_service.rb3
-rw-r--r--app/services/web_hooks/log_execution_service.rb1
-rw-r--r--app/services/work_items/create_and_link_service.rb6
-rw-r--r--app/services/work_items/create_from_task_service.rb3
-rw-r--r--app/services/work_items/create_service.rb23
-rw-r--r--app/services/work_items/parent_links/create_service.rb60
-rw-r--r--app/services/work_items/parent_links/destroy_service.rb32
-rw-r--r--app/services/work_items/task_list_reference_removal_service.rb15
-rw-r--r--app/services/work_items/task_list_reference_replacement_service.rb9
-rw-r--r--app/services/work_items/update_service.rb30
-rw-r--r--app/services/work_items/widgets/base_service.rb16
-rw-r--r--app/services/work_items/widgets/description_service/update_service.rb15
-rw-r--r--app/services/work_items/widgets/hierarchy_service/base_service.rb79
-rw-r--r--app/services/work_items/widgets/hierarchy_service/create_service.rb15
-rw-r--r--app/services/work_items/widgets/hierarchy_service/update_service.rb15
-rw-r--r--app/services/work_items/widgets/weight_service/update_service.rb15
121 files changed, 1352 insertions, 1179 deletions
diff --git a/app/services/alert_management/alerts/update_service.rb b/app/services/alert_management/alerts/update_service.rb
index 6bdceb0f27b..f273e15b159 100644
--- a/app/services/alert_management/alerts/update_service.rb
+++ b/app/services/alert_management/alerts/update_service.rb
@@ -12,7 +12,6 @@ module AlertManagement
@alert = alert
@param_errors = []
@status = params.delete(:status)
- @status_change_reason = params.delete(:status_change_reason)
super(project: alert.project, current_user: current_user, params: params)
end
@@ -37,7 +36,7 @@ module AlertManagement
private
- attr_reader :alert, :param_errors, :status, :status_change_reason
+ attr_reader :alert, :param_errors, :status
def allowed?
current_user&.can?(:update_alert_management_alert, alert)
@@ -130,37 +129,16 @@ module AlertManagement
def handle_status_change
add_status_change_system_note
resolve_todos if alert.resolved?
- sync_to_incident if should_sync_to_incident?
end
def add_status_change_system_note
- SystemNoteService.change_alert_status(alert, current_user, status_change_reason)
+ SystemNoteService.change_alert_status(alert, current_user)
end
def resolve_todos
todo_service.resolve_todos_for_target(alert, current_user)
end
- def sync_to_incident
- ::Issues::UpdateService.new(
- project: project,
- current_user: current_user,
- params: {
- escalation_status: {
- status: status,
- status_change_reason: " by changing the status of #{alert.to_reference(project)}"
- }
- }
- ).execute(alert.issue)
- end
-
- def should_sync_to_incident?
- alert.issue &&
- alert.issue.supports_escalation? &&
- alert.issue.escalation_status &&
- alert.issue.escalation_status.status != alert.status
- end
-
def filter_duplicate
# Only need to check if changing to a not-resolved status
return if params[:status_event].blank? || params[:status_event] == :resolve
diff --git a/app/services/audit_event_service.rb b/app/services/audit_event_service.rb
index 97debccfb18..26244a8bcc5 100644
--- a/app/services/audit_event_service.rb
+++ b/app/services/audit_event_service.rb
@@ -121,12 +121,15 @@ class AuditEventService
def log_security_event_to_database
return if Gitlab::Database.read_only?
- event = AuditEvent.new(base_payload.merge(details: @details))
+ event = build_event
save_or_track event
-
event
end
+ def build_event
+ AuditEvent.new(base_payload.merge(details: @details))
+ end
+
def stream_event_to_external_destinations(_event)
# Defined in EE
end
diff --git a/app/services/auto_merge/base_service.rb b/app/services/auto_merge/base_service.rb
index 2a32f0c74ac..9e49bd86ec0 100644
--- a/app/services/auto_merge/base_service.rb
+++ b/app/services/auto_merge/base_service.rb
@@ -63,21 +63,6 @@ module AutoMerge
end
end
- ##
- # NOTE: This method is to be removed when `disallow_to_create_merge_request_pipelines_in_target_project`
- # feature flag is removed.
- def self.can_add_to_merge_train?(merge_request)
- if ::Feature.enabled?(:ci_disallow_to_create_merge_request_pipelines_in_target_project, merge_request.target_project)
- merge_request.for_same_project?
- else
- true
- end
- end
-
- def can_add_to_merge_train?(merge_request)
- self.class.can_add_to_merge_train?(merge_request)
- end
-
private
# Overridden in child classes
diff --git a/app/services/bulk_imports/create_pipeline_trackers_service.rb b/app/services/bulk_imports/create_pipeline_trackers_service.rb
index 5c9c68e62b5..af97aec09b5 100644
--- a/app/services/bulk_imports/create_pipeline_trackers_service.rb
+++ b/app/services/bulk_imports/create_pipeline_trackers_service.rb
@@ -47,7 +47,7 @@ module BulkImports
end
def non_patch_source_version
- Gitlab::VersionInfo.new(source_version.major, source_version.minor, 0)
+ source_version.without_patch
end
def log_skipped_pipeline(pipeline, minimum_version, maximum_version)
diff --git a/app/services/ci/build_report_result_service.rb b/app/services/ci/build_report_result_service.rb
index 8bdb51320f9..f9146b3677a 100644
--- a/app/services/ci/build_report_result_service.rb
+++ b/app/services/ci/build_report_result_service.rb
@@ -22,7 +22,7 @@ module Ci
private
def generate_test_suite_report(build)
- build.collect_test_reports!(Gitlab::Ci::Reports::TestReports.new)
+ build.collect_test_reports!(Gitlab::Ci::Reports::TestReport.new)
end
def tests_params(test_suite)
diff --git a/app/services/ci/external_pull_requests/create_pipeline_service.rb b/app/services/ci/external_pull_requests/create_pipeline_service.rb
index 66127c94d35..ffc129eccda 100644
--- a/app/services/ci/external_pull_requests/create_pipeline_service.rb
+++ b/app/services/ci/external_pull_requests/create_pipeline_service.rb
@@ -10,24 +10,12 @@ module Ci
return pull_request_not_open_error unless pull_request.open?
return pull_request_branch_error unless pull_request.actual_branch_head?
- create_pipeline_for(pull_request)
- end
-
- private
-
- def create_pipeline_for(pull_request)
Ci::ExternalPullRequests::CreatePipelineWorker.perform_async(
project.id, current_user.id, pull_request.id
)
end
- def create_params(pull_request)
- {
- ref: pull_request.source_ref,
- source_sha: pull_request.source_sha,
- target_sha: pull_request.target_sha
- }
- end
+ private
def pull_request_not_open_error
ServiceResponse.error(message: 'The pull request is not opened', payload: nil)
diff --git a/app/services/ci/generate_coverage_reports_service.rb b/app/services/ci/generate_coverage_reports_service.rb
index 12b1f19f4b5..81f26e84ef8 100644
--- a/app/services/ci/generate_coverage_reports_service.rb
+++ b/app/services/ci/generate_coverage_reports_service.rb
@@ -32,5 +32,18 @@ module Ci
def latest?(base_pipeline, head_pipeline, data)
data&.fetch(:key, nil) == key(base_pipeline, head_pipeline)
end
+
+ private
+
+ def key(base_pipeline, head_pipeline)
+ [
+ base_pipeline&.id, last_update_timestamp(base_pipeline),
+ head_pipeline&.id, last_update_timestamp(head_pipeline)
+ ]
+ end
+
+ def last_update_timestamp(pipeline_hierarchy)
+ pipeline_hierarchy&.self_and_descendants&.maximum(:updated_at)
+ end
end
end
diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb
index 635111130d6..05f8e804c67 100644
--- a/app/services/ci/job_artifacts/create_service.rb
+++ b/app/services/ci/job_artifacts/create_service.rb
@@ -5,10 +5,7 @@ module Ci
class CreateService < ::BaseService
include Gitlab::Utils::UsageData
- ArtifactsExistError = Class.new(StandardError)
-
LSIF_ARTIFACT_TYPE = 'lsif'
- METRICS_REPORT_UPLOAD_EVENT_NAME = 'i_testing_metrics_report_artifact_uploaders'
OBJECT_STORAGE_ERRORS = [
Errno::EIO,
@@ -74,10 +71,6 @@ module Ci
Ci::JobArtifact.max_artifact_size(type: type, project: project)
end
- def forbidden_type_error(type)
- error("#{type} artifacts are forbidden", :forbidden)
- end
-
def too_large_error
error('file size has reached maximum size limit', :payload_too_large)
end
@@ -160,10 +153,8 @@ module Ci
)
end
- def track_artifact_uploader(artifact)
- return unless artifact.file_type == 'metrics'
-
- track_usage_event(METRICS_REPORT_UPLOAD_EVENT_NAME, @job.user_id)
+ def track_artifact_uploader(_artifact)
+ # Overridden in EE
end
def parse_dotenv_artifact(artifact)
@@ -172,3 +163,5 @@ module Ci
end
end
end
+
+Ci::JobArtifacts::CreateService.prepend_mod
diff --git a/app/services/ci/job_artifacts/destroy_batch_service.rb b/app/services/ci/job_artifacts/destroy_batch_service.rb
index 49b65f13804..9d6b413ce59 100644
--- a/app/services/ci/job_artifacts/destroy_batch_service.rb
+++ b/app/services/ci/job_artifacts/destroy_batch_service.rb
@@ -184,10 +184,12 @@ module Ci
project_ids << artifact.project_id
end
- Gitlab::ProjectStatsRefreshConflictsLogger.warn_skipped_artifact_deletion_during_stats_refresh(
- method: 'Ci::JobArtifacts::DestroyBatchService#execute',
- project_ids: project_ids
- )
+ if project_ids.any?
+ Gitlab::ProjectStatsRefreshConflictsLogger.warn_skipped_artifact_deletion_during_stats_refresh(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_ids: project_ids
+ )
+ end
end
end
end
diff --git a/app/services/ci/pipeline_artifacts/coverage_report_service.rb b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
index b0acb1d5a0b..c11a8f7a0fd 100644
--- a/app/services/ci/pipeline_artifacts/coverage_report_service.rb
+++ b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
@@ -9,17 +9,11 @@ module Ci
end
def execute
- return if pipeline.has_coverage_reports?
return if report.empty?
- pipeline.pipeline_artifacts.create!(
- project_id: pipeline.project_id,
- file_type: :code_coverage,
- file_format: Ci::PipelineArtifact::REPORT_TYPES.fetch(:code_coverage),
- size: carrierwave_file["tempfile"].size,
- file: carrierwave_file,
- expire_at: Ci::PipelineArtifact::EXPIRATION_DATE.from_now
- )
+ Ci::PipelineArtifact.create_or_replace_for_pipeline!(**pipeline_artifact_params).tap do |pipeline_artifact|
+ Gitlab::AppLogger.info(log_params(pipeline_artifact))
+ end
end
private
@@ -32,6 +26,15 @@ module Ci
end
end
+ def pipeline_artifact_params
+ {
+ pipeline: pipeline,
+ file_type: :code_coverage,
+ file: carrierwave_file,
+ size: carrierwave_file['tempfile'].size
+ }
+ end
+
def carrierwave_file
strong_memoize(:carrier_wave_file) do
CarrierWaveStringFile.new_file(
@@ -41,6 +44,15 @@ module Ci
)
end
end
+
+ def log_params(pipeline_artifact)
+ {
+ project_id: pipeline.project_id,
+ pipeline_id: pipeline.id,
+ pipeline_artifact_id: pipeline_artifact.id,
+ message: "Created code coverage for pipeline."
+ }
+ end
end
end
end
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
index 4d1b2e07d7f..676c2ecb257 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -78,7 +78,7 @@ module Ci
def status_for_array(statuses, dag:)
result = Gitlab::Ci::Status::Composite
- .new(statuses, dag: dag, project: pipeline.project)
+ .new(statuses, dag: dag)
.status
result || 'success'
end
diff --git a/app/services/ci/queue/build_queue_service.rb b/app/services/ci/queue/build_queue_service.rb
index fefbdb151ec..2deebc1d725 100644
--- a/app/services/ci/queue/build_queue_service.rb
+++ b/app/services/ci/queue/build_queue_service.rb
@@ -24,25 +24,7 @@ module Ci
# rubocop:disable CodeReuse/ActiveRecord
def builds_for_group_runner
- if strategy.use_denormalized_data_strategy?
- strategy.builds_for_group_runner
- else
- # Workaround for weird Rails bug, that makes `runner.groups.to_sql` to return `runner_id = NULL`
- groups = ::Group.joins(:runner_namespaces).merge(runner.runner_namespaces)
-
- hierarchy_groups = Gitlab::ObjectHierarchy
- .new(groups)
- .base_and_descendants
-
- projects = Project.where(namespace_id: hierarchy_groups)
- .with_group_runners_enabled
- .with_builds_enabled
- .without_deleted
-
- relation = new_builds.where(project: projects)
-
- order(relation)
- end
+ strategy.builds_for_group_runner
end
def builds_for_project_runner
@@ -80,11 +62,7 @@ module Ci
def strategy
strong_memoize(:strategy) do
- if ::Feature.enabled?(:ci_pending_builds_queue_source, runner)
- Queue::PendingBuildsStrategy.new(runner)
- else
- Queue::BuildsTableStrategy.new(runner)
- end
+ Queue::PendingBuildsStrategy.new(runner)
end
end
diff --git a/app/services/ci/queue/builds_table_strategy.rb b/app/services/ci/queue/builds_table_strategy.rb
deleted file mode 100644
index c27c10bd18d..00000000000
--- a/app/services/ci/queue/builds_table_strategy.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- module Queue
- class BuildsTableStrategy
- attr_reader :runner
-
- def initialize(runner)
- @runner = runner
- end
-
- # rubocop:disable CodeReuse/ActiveRecord
- def builds_for_shared_runner
- relation = new_builds
- # don't run projects which have not enabled shared runners and builds
- .joins('INNER JOIN projects ON ci_builds.project_id = projects.id')
- .where(projects: { shared_runners_enabled: true, pending_delete: false })
- .joins('LEFT JOIN project_features ON ci_builds.project_id = project_features.project_id')
- .where('project_features.builds_access_level IS NULL or project_features.builds_access_level > 0')
-
- if Feature.enabled?(:ci_queueing_disaster_recovery_disable_fair_scheduling, runner, type: :ops)
- # if disaster recovery is enabled, we fallback to FIFO scheduling
- relation.order('ci_builds.id ASC')
- else
- # Implement fair scheduling
- # this returns builds that are ordered by number of running builds
- # we prefer projects that don't use shared runners at all
- relation
- .joins("LEFT JOIN (#{running_builds_for_shared_runners.to_sql}) AS project_builds ON ci_builds.project_id = project_builds.project_id")
- .order(Arel.sql('COALESCE(project_builds.running_builds, 0) ASC'), 'ci_builds.id ASC')
- end
- end
-
- def builds_for_group_runner
- raise NotImplementedError
- end
-
- def builds_matching_tag_ids(relation, ids)
- # pick builds that does not have other tags than runner's one
- relation.matches_tag_ids(ids)
- end
-
- def builds_with_any_tags(relation)
- # pick builds that have at least one tag
- relation.with_any_tags
- end
-
- def order(relation)
- relation.order('id ASC')
- end
-
- def new_builds
- ::Ci::Build.pending.unstarted
- end
-
- def build_ids(relation)
- relation.pluck(:id)
- end
-
- def use_denormalized_data_strategy?
- false
- end
-
- private
-
- def running_builds_for_shared_runners
- ::Ci::Build.running
- .where(runner: ::Ci::Runner.instance_type)
- .group(:project_id)
- .select(:project_id, 'COUNT(*) AS running_builds')
- end
- # rubocop:enable CodeReuse/ActiveRecord
- end
- end
-end
diff --git a/app/services/ci/queue/pending_builds_strategy.rb b/app/services/ci/queue/pending_builds_strategy.rb
index f2eba0681db..c8bdbba5e65 100644
--- a/app/services/ci/queue/pending_builds_strategy.rb
+++ b/app/services/ci/queue/pending_builds_strategy.rb
@@ -23,19 +23,11 @@ module Ci
end
def builds_matching_tag_ids(relation, ids)
- if use_denormalized_data_strategy?
- relation.for_tags(runner.tags_ids)
- else
- relation.merge(CommitStatus.matches_tag_ids(ids, table: 'ci_pending_builds', column: 'build_id'))
- end
+ relation.for_tags(runner.tags_ids)
end
def builds_with_any_tags(relation)
- if use_denormalized_data_strategy?
- relation.where('cardinality(tag_ids) > 0')
- else
- relation.merge(CommitStatus.with_any_tags(table: 'ci_pending_builds', column: 'build_id'))
- end
+ relation.where('cardinality(tag_ids) > 0')
end
def order(relation)
@@ -50,23 +42,10 @@ module Ci
relation.pluck(:build_id)
end
- def use_denormalized_data_strategy?
- ::Feature.enabled?(:ci_queuing_use_denormalized_data_strategy)
- end
-
private
def builds_available_for_shared_runners
- if use_denormalized_data_strategy?
- new_builds.with_instance_runners
- else
- new_builds
- # don't run projects which have not enabled shared runners and builds
- .joins('INNER JOIN projects ON ci_pending_builds.project_id = projects.id')
- .where(projects: { shared_runners_enabled: true, pending_delete: false })
- .joins('LEFT JOIN project_features ON ci_pending_builds.project_id = project_features.project_id')
- .where('project_features.builds_access_level IS NULL or project_features.builds_access_level > 0')
- end
+ new_builds.with_instance_runners
end
def builds_ordered_for_shared_runners(relation)
diff --git a/app/services/ci/runners/reconcile_existing_runner_versions_service.rb b/app/services/ci/runners/reconcile_existing_runner_versions_service.rb
new file mode 100644
index 00000000000..e04079bfe27
--- /dev/null
+++ b/app/services/ci/runners/reconcile_existing_runner_versions_service.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+module Ci
+ module Runners
+ class ReconcileExistingRunnerVersionsService
+ include BaseServiceUtility
+
+ VERSION_BATCH_SIZE = 100
+
+ def execute
+ insert_result = insert_runner_versions
+ total_deleted = cleanup_runner_versions(insert_result[:versions_from_runners])
+ total_updated = update_status_on_outdated_runner_versions(insert_result[:versions_from_runners])
+
+ success({
+ total_inserted: insert_result[:new_record_count],
+ total_updated: total_updated,
+ total_deleted: total_deleted
+ })
+ end
+
+ private
+
+ def upgrade_check
+ Gitlab::Ci::RunnerUpgradeCheck.instance
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def insert_runner_versions
+ versions_from_runners = Set[]
+ new_record_count = 0
+ Ci::Runner.distinct_each_batch(column: :version, of: VERSION_BATCH_SIZE) do |version_batch|
+ batch_versions = version_batch.pluck(:version).to_set
+ versions_from_runners += batch_versions
+
+ # Avoid hitting primary DB
+ already_existing_versions = Ci::RunnerVersion.where(version: batch_versions).pluck(:version)
+ new_versions = batch_versions - already_existing_versions
+
+ if new_versions.any?
+ new_record_count += Ci::RunnerVersion.insert_all(
+ new_versions.map { |v| { version: v } },
+ returning: :version,
+ unique_by: :version).count
+ end
+ end
+
+ { versions_from_runners: versions_from_runners, new_record_count: new_record_count }
+ end
+
+ def cleanup_runner_versions(versions_from_runners)
+ Ci::RunnerVersion.where.not(version: versions_from_runners).delete_all
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def outdated_runner_versions
+ Ci::RunnerVersion.potentially_outdated
+ end
+
+ def update_status_on_outdated_runner_versions(versions_from_runners)
+ total_updated = 0
+
+ outdated_runner_versions.each_batch(of: VERSION_BATCH_SIZE) do |version_batch|
+ updated = version_batch
+ .select { |runner_version| versions_from_runners.include?(runner_version['version']) }
+ .filter_map { |runner_version| runner_version_with_updated_status(runner_version) }
+
+ if updated.any?
+ total_updated += Ci::RunnerVersion.upsert_all(updated, unique_by: :version).count
+ end
+ end
+
+ total_updated
+ end
+
+ def runner_version_with_updated_status(runner_version)
+ version = runner_version['version']
+ suggestion = upgrade_check.check_runner_upgrade_status(version)
+ new_status = suggestion.each_key.first
+
+ if new_status != :error && new_status != runner_version['status'].to_sym
+ {
+ version: version,
+ status: Ci::RunnerVersion.statuses[new_status]
+ }
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/ci/runners/register_runner_service.rb b/app/services/ci/runners/register_runner_service.rb
index 196d2de1a65..6588cd7e248 100644
--- a/app/services/ci/runners/register_runner_service.rb
+++ b/app/services/ci/runners/register_runner_service.rb
@@ -8,7 +8,19 @@ module Ci
return unless runner_type_attrs
- ::Ci::Runner.create(attributes.merge(runner_type_attrs))
+ runner = ::Ci::Runner.new(attributes.merge(runner_type_attrs))
+
+ Ci::BulkInsertableTags.with_bulk_insert_tags do
+ Ci::Runner.transaction do
+ if runner.save
+ Gitlab::Ci::Tags::BulkInsert.bulk_insert_tags!([runner])
+ else
+ raise ActiveRecord::Rollback
+ end
+ end
+ end
+
+ runner
end
private
diff --git a/app/services/ci/test_failure_history_service.rb b/app/services/ci/test_failure_history_service.rb
index 7323ad417ea..2214a6a2729 100644
--- a/app/services/ci/test_failure_history_service.rb
+++ b/app/services/ci/test_failure_history_service.rb
@@ -81,7 +81,7 @@ module Ci
def generate_test_suite!(build)
# Returns an instance of Gitlab::Ci::Reports::TestSuite
- build.collect_test_reports!(Gitlab::Ci::Reports::TestReports.new)
+ build.collect_test_reports!(Gitlab::Ci::Reports::TestReport.new)
end
def ci_unit_test_attrs(batch)
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index a525ea179e0..58927a90b6e 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -14,8 +14,6 @@ module Ci
# Add a build to the pending builds queue
#
def push(build, transition)
- return unless maintain_pending_builds_queue?
-
raise InvalidQueueTransition unless transition.to == 'pending'
transition.within_transaction do
@@ -33,8 +31,6 @@ module Ci
# Remove a build from the pending builds queue
#
def pop(build, transition)
- return unless maintain_pending_builds_queue?
-
raise InvalidQueueTransition unless transition.from == 'pending'
transition.within_transaction { remove!(build) }
@@ -57,7 +53,6 @@ module Ci
# Add shared runner build tracking entry (used for queuing).
#
def track(build, transition)
- return unless maintain_pending_builds_queue?
return unless build.shared_runner_build?
raise InvalidQueueTransition unless transition.to == 'running'
@@ -78,7 +73,6 @@ module Ci
# queuing).
#
def untrack(build, transition)
- return unless maintain_pending_builds_queue?
return unless build.shared_runner_build?
raise InvalidQueueTransition unless transition.from == 'running'
@@ -115,9 +109,5 @@ module Ci
runner.pick_build!(build)
end
end
-
- def maintain_pending_builds_queue?
- ::Ci::PendingBuild.maintain_denormalized_data?
- end
end
end
diff --git a/app/services/ci/update_pending_build_service.rb b/app/services/ci/update_pending_build_service.rb
index 733b684bcc6..2118dbcc19e 100644
--- a/app/services/ci/update_pending_build_service.rb
+++ b/app/services/ci/update_pending_build_service.rb
@@ -15,8 +15,6 @@ module Ci
end
def execute
- return unless ::Ci::PendingBuild.maintain_denormalized_data?
-
@model.pending_builds.each_batch do |relation|
relation.update_all(@update_params)
end
diff --git a/app/services/clusters/integrations/create_service.rb b/app/services/clusters/integrations/create_service.rb
index 142f731a7d3..555df52d177 100644
--- a/app/services/clusters/integrations/create_service.rb
+++ b/app/services/clusters/integrations/create_service.rb
@@ -31,8 +31,6 @@ module Clusters
case params[:application_type]
when 'prometheus'
cluster.find_or_build_integration_prometheus
- when 'elastic_stack'
- cluster.find_or_build_integration_elastic_stack
else
raise ArgumentError, "invalid application_type: #{params[:application_type]}"
end
diff --git a/app/services/concerns/alert_management/alert_processing.rb b/app/services/concerns/alert_management/alert_processing.rb
index abbfd4d66d4..f10ff4e6f19 100644
--- a/app/services/concerns/alert_management/alert_processing.rb
+++ b/app/services/concerns/alert_management/alert_processing.rb
@@ -38,7 +38,7 @@ module AlertManagement
if alert.resolve(incoming_payload.ends_at)
SystemNoteService.change_alert_status(alert, User.alert_bot)
- close_issue(alert.issue) if auto_close_incident?
+ close_issue(alert.issue_id) if auto_close_incident?
else
logger.warn(
message: 'Unable to update AlertManagement::Alert status to resolved',
@@ -52,22 +52,18 @@ module AlertManagement
alert.register_new_event!
end
- def close_issue(issue)
- return if issue.blank? || issue.closed?
+ def close_issue(issue_id)
+ return unless issue_id
- ::Issues::CloseService
- .new(project: project, current_user: User.alert_bot)
- .execute(issue, system_note: false)
-
- SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
+ ::IncidentManagement::CloseIncidentWorker.perform_async(issue_id)
end
def process_new_alert
+ return if resolving_alert?
+
if alert.save
alert.execute_integrations
SystemNoteService.create_new_alert(alert, alert_source)
-
- process_resolved_alert if resolving_alert?
else
logger.warn(
message: "Unable to create AlertManagement::Alert from #{alert_source}",
@@ -78,7 +74,7 @@ module AlertManagement
end
def process_incident_issues
- return if alert.issue || alert.resolved?
+ return if alert.issue_id || alert.resolved?
::IncidentManagement::ProcessAlertWorkerV2.perform_async(alert.id)
end
diff --git a/app/services/concerns/integrations/project_test_data.rb b/app/services/concerns/integrations/project_test_data.rb
index acaa773fd49..ae1e1d1e66c 100644
--- a/app/services/concerns/integrations/project_test_data.rb
+++ b/app/services/concerns/integrations/project_test_data.rb
@@ -63,7 +63,7 @@ module Integrations
return { error: s_('TestHooks|Ensure the project has deployments.') } unless deployment.present?
- Gitlab::DataBuilder::Deployment.build(deployment, Time.current)
+ Gitlab::DataBuilder::Deployment.build(deployment, deployment.status, Time.current)
end
def releases_events_data
diff --git a/app/services/concerns/work_items/widgetable_service.rb b/app/services/concerns/work_items/widgetable_service.rb
new file mode 100644
index 00000000000..5665b07dce1
--- /dev/null
+++ b/app/services/concerns/work_items/widgetable_service.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module WidgetableService
+ def execute_widgets(work_item:, callback:, widget_params: {})
+ work_item.widgets.each do |widget|
+ widget_service(widget).try(callback, params: widget_params[widget.class.api_symbol])
+ end
+ end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def widget_service(widget)
+ @widget_services ||= {}
+ return @widget_services[widget] if @widget_services.has_key?(widget)
+
+ @widget_services[widget] = widget_service_class(widget)&.new(widget: widget, current_user: current_user)
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
+ def widget_service_class(widget)
+ "WorkItems::Widgets::#{widget.type.capitalize}Service::#{self.class.name.demodulize}".constantize
+ rescue NameError
+ nil
+ end
+ end
+end
diff --git a/app/services/deployments/create_for_build_service.rb b/app/services/deployments/create_for_build_service.rb
index b3e2d2edb59..76d871161e3 100644
--- a/app/services/deployments/create_for_build_service.rb
+++ b/app/services/deployments/create_for_build_service.rb
@@ -11,8 +11,18 @@ module Deployments
# TODO: Move all buisness logic in `Seed::Deployment` to this class after
# `create_deployment_in_separate_transaction` feature flag has been removed.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/348778
+
+ # If build.persisted_environment is a BatchLoader, we need to remove
+ # the method proxy in order to clone into new item here
+ # https://github.com/exAspArk/batch-loader/issues/31
+ environment = if build.persisted_environment.respond_to?(:__sync)
+ build.persisted_environment.__sync
+ else
+ build.persisted_environment
+ end
+
deployment = ::Gitlab::Ci::Pipeline::Seed::Deployment
- .new(build, build.persisted_environment).to_resource
+ .new(build, environment).to_resource
return unless deployment
diff --git a/app/services/error_tracking/list_issues_service.rb b/app/services/error_tracking/list_issues_service.rb
index ca7208dba96..eae736ae10d 100644
--- a/app/services/error_tracking/list_issues_service.rb
+++ b/app/services/error_tracking/list_issues_service.rb
@@ -75,6 +75,7 @@ module ErrorTracking
# For now we implement the bare minimum for rendering the list in UI.
list_opts = {
filters: { status: opts[:issue_status] },
+ query: opts[:search_term],
sort: opts[:sort],
limit: opts[:limit],
cursor: opts[:cursor]
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index 2ab4bb47462..019246dfc9f 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -25,14 +25,18 @@ class EventCreateService
def open_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :created).tap do
track_event(event_action: :created, event_target: MergeRequest, author_id: current_user.id)
- track_mr_snowplow_event(merge_request, current_user, :create)
+ track_snowplow_event(merge_request, current_user,
+ Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
+ :create, 'merge_requests_users')
end
end
def close_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :closed).tap do
track_event(event_action: :closed, event_target: MergeRequest, author_id: current_user.id)
- track_mr_snowplow_event(merge_request, current_user, :close)
+ track_snowplow_event(merge_request, current_user,
+ Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
+ :close, 'merge_requests_users')
end
end
@@ -43,7 +47,9 @@ class EventCreateService
def merge_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :merged).tap do
track_event(event_action: :merged, event_target: MergeRequest, author_id: current_user.id)
- track_mr_snowplow_event(merge_request, current_user, :merge)
+ track_snowplow_event(merge_request, current_user,
+ Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
+ :merge, 'merge_requests_users')
end
end
@@ -67,7 +73,9 @@ class EventCreateService
create_record_event(note, current_user, :commented).tap do
if note.is_a?(DiffNote) && note.for_merge_request?
track_event(event_action: :commented, event_target: MergeRequest, author_id: current_user.id)
- track_mr_snowplow_event(note, current_user, :comment)
+ track_snowplow_event(note, current_user,
+ Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION,
+ :comment, 'merge_requests_users')
end
end
end
@@ -100,12 +108,27 @@ class EventCreateService
records = create.zip([:created].cycle) + update.zip([:updated].cycle)
return [] if records.empty?
+ if create.any?
+ track_snowplow_event(create.first, current_user,
+ Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION,
+ :create, 'design_users')
+ end
+
+ if update.any?
+ track_snowplow_event(update.first, current_user,
+ Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION,
+ :update, 'design_users')
+ end
+
create_record_events(records, current_user)
end
def destroy_designs(designs, current_user)
return [] unless designs.present?
+ track_snowplow_event(designs.first, current_user,
+ Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION,
+ :destroy, 'design_users')
create_record_events(designs.zip([:destroyed].cycle), current_user)
end
@@ -230,14 +253,14 @@ class EventCreateService
Gitlab::UsageDataCounters::TrackUniqueEvents.track_event(**params)
end
- def track_mr_snowplow_event(record, current_user, action)
+ def track_snowplow_event(record, current_user, category, action, label)
return unless Feature.enabled?(:route_hll_to_snowplow_phase2)
project = record.project
Gitlab::Tracking.event(
- Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION.to_s,
+ category.to_s,
action.to_s,
- label: 'merge_requests_users',
+ label: label,
project: project,
namespace: project.namespace,
user: current_user
diff --git a/app/services/feature_flags/base_service.rb b/app/services/feature_flags/base_service.rb
index 86dc6188f0a..59db1a5f12f 100644
--- a/app/services/feature_flags/base_service.rb
+++ b/app/services/feature_flags/base_service.rb
@@ -15,6 +15,10 @@ module FeatureFlags
protected
+ def update_last_feature_flag_updated_at!
+ Operations::FeatureFlagsClient.update_last_feature_flag_updated_at!(project)
+ end
+
def audit_event(feature_flag)
message = audit_message(feature_flag)
diff --git a/app/services/feature_flags/create_service.rb b/app/services/feature_flags/create_service.rb
index ebbe71f39c7..6ea40345191 100644
--- a/app/services/feature_flags/create_service.rb
+++ b/app/services/feature_flags/create_service.rb
@@ -10,6 +10,8 @@ module FeatureFlags
feature_flag = project.operations_feature_flags.new(params)
if feature_flag.save
+ update_last_feature_flag_updated_at!
+
success(feature_flag: feature_flag)
else
error(feature_flag.errors.full_messages, 400)
diff --git a/app/services/feature_flags/destroy_service.rb b/app/services/feature_flags/destroy_service.rb
index 817a80940c0..0fdc890b8a3 100644
--- a/app/services/feature_flags/destroy_service.rb
+++ b/app/services/feature_flags/destroy_service.rb
@@ -13,6 +13,8 @@ module FeatureFlags
ApplicationRecord.transaction do
if feature_flag.destroy
+ update_last_feature_flag_updated_at!
+
success(feature_flag: feature_flag)
else
error(feature_flag.errors.full_messages)
diff --git a/app/services/feature_flags/update_service.rb b/app/services/feature_flags/update_service.rb
index bcfd2c15189..a465ca1dd5f 100644
--- a/app/services/feature_flags/update_service.rb
+++ b/app/services/feature_flags/update_service.rb
@@ -29,6 +29,8 @@ module FeatureFlags
audit_event = audit_event(feature_flag)
if feature_flag.save
+ update_last_feature_flag_updated_at!
+
success(feature_flag: feature_flag, audit_event: audit_event)
else
error(feature_flag.errors.full_messages, :bad_request)
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index aa471d3a69f..7de56c037ed 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -81,6 +81,8 @@ module Git
branch_update_hooks if updating_branch?
branch_change_hooks if creating_branch? || updating_branch?
branch_remove_hooks if removing_branch?
+
+ track_process_commit_limit_overflow
end
def branch_create_hooks
@@ -123,6 +125,12 @@ module Git
end
end
+ def track_process_commit_limit_overflow
+ return if threshold_commits.count <= PROCESS_COMMIT_LIMIT
+
+ Gitlab::Metrics.add_event(:process_commit_limit_overflow)
+ end
+
# Schedules processing of commit messages
def enqueue_process_commit_messages
referencing_commits = limited_commits.select(&:matches_cross_reference_regex?)
diff --git a/app/services/git/branch_push_service.rb b/app/services/git/branch_push_service.rb
index 91f14251608..464d79d9991 100644
--- a/app/services/git/branch_push_service.rb
+++ b/app/services/git/branch_push_service.rb
@@ -39,13 +39,13 @@ module Git
def enqueue_update_mrs
return if params[:merge_request_branches]&.exclude?(branch_name)
- # TODO: pass params[:push_options] to worker
UpdateMergeRequestsWorker.perform_async(
project.id,
current_user.id,
oldrev,
newrev,
- ref
+ ref,
+ params.slice(:push_options).deep_stringify_keys
)
end
diff --git a/app/services/google_cloud/base_service.rb b/app/services/google_cloud/base_service.rb
new file mode 100644
index 00000000000..016ab15408f
--- /dev/null
+++ b/app/services/google_cloud/base_service.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module GoogleCloud
+ class BaseService < ::BaseService
+ protected
+
+ def google_oauth2_token
+ @params[:google_oauth2_token]
+ end
+
+ def gcp_project_id
+ @params[:gcp_project_id]
+ end
+
+ def environment_name
+ @params[:environment_name]
+ end
+
+ def google_api_client
+ @google_api_client_instance ||= GoogleApi::CloudPlatform::Client.new(google_oauth2_token, nil)
+ end
+
+ def unique_gcp_project_ids
+ filter_params = { key: 'GCP_PROJECT_ID' }
+ ::Ci::VariablesFinder.new(project, filter_params).execute.map(&:value).uniq
+ end
+
+ def group_vars_by_environment(keys)
+ filtered_vars = project.variables.filter { |variable| keys.include? variable.key }
+ filtered_vars.each_with_object({}) do |variable, grouped|
+ grouped[variable.environment_scope] ||= {}
+ grouped[variable.environment_scope][variable.key] = variable.value
+ end
+ end
+
+ def create_or_replace_project_vars(environment_scope, key, value, is_protected, is_masked = false)
+ change_params = {
+ variable_params: {
+ key: key,
+ value: value,
+ environment_scope: environment_scope,
+ protected: is_protected,
+ masked: is_masked
+ }
+ }
+ existing_variable = find_existing_variable(environment_scope, key)
+
+ if existing_variable
+ change_params[:action] = :update
+ change_params[:variable] = existing_variable
+ else
+ change_params[:action] = :create
+ end
+
+ ::Ci::ChangeVariableService.new(container: project, current_user: current_user, params: change_params).execute
+ end
+
+ private
+
+ def find_existing_variable(environment_scope, key)
+ filter_params = { key: key, filter: { environment_scope: environment_scope } }
+ ::Ci::VariablesFinder.new(project, filter_params).execute.first
+ end
+ end
+end
diff --git a/app/services/google_cloud/create_service_accounts_service.rb b/app/services/google_cloud/create_service_accounts_service.rb
index 51d08cc5b55..9617161b8e9 100644
--- a/app/services/google_cloud/create_service_accounts_service.rb
+++ b/app/services/google_cloud/create_service_accounts_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module GoogleCloud
- class CreateServiceAccountsService < :: BaseService
+ class CreateServiceAccountsService < ::GoogleCloud::BaseService
def execute
service_account = google_api_client.create_service_account(gcp_project_id, service_account_name, service_account_desc)
service_account_key = google_api_client.create_service_account_key(gcp_project_id, service_account.unique_id)
@@ -23,22 +23,6 @@ module GoogleCloud
private
- def google_oauth2_token
- @params[:google_oauth2_token]
- end
-
- def gcp_project_id
- @params[:gcp_project_id]
- end
-
- def environment_name
- @params[:environment_name]
- end
-
- def google_api_client
- @google_api_client_instance ||= GoogleApi::CloudPlatform::Client.new(google_oauth2_token, nil)
- end
-
def service_accounts_service
GoogleCloud::ServiceAccountsService.new(project)
end
diff --git a/app/services/google_cloud/enable_cloud_run_service.rb b/app/services/google_cloud/enable_cloud_run_service.rb
index 643f2b2b6d2..4fd92f423c5 100644
--- a/app/services/google_cloud/enable_cloud_run_service.rb
+++ b/app/services/google_cloud/enable_cloud_run_service.rb
@@ -1,15 +1,13 @@
# frozen_string_literal: true
module GoogleCloud
- class EnableCloudRunService < :: BaseService
+ class EnableCloudRunService < ::GoogleCloud::BaseService
def execute
gcp_project_ids = unique_gcp_project_ids
if gcp_project_ids.empty?
error("No GCP projects found. Configure a service account or GCP_PROJECT_ID ci variable.")
else
- google_api_client = GoogleApi::CloudPlatform::Client.new(token_in_session, nil)
-
gcp_project_ids.each do |gcp_project_id|
google_api_client.enable_cloud_run(gcp_project_id)
google_api_client.enable_artifacts_registry(gcp_project_id)
@@ -19,16 +17,5 @@ module GoogleCloud
success({ gcp_project_ids: gcp_project_ids })
end
end
-
- private
-
- def unique_gcp_project_ids
- all_gcp_project_ids = project.variables.filter { |var| var.key == 'GCP_PROJECT_ID' }.map { |var| var.value }
- all_gcp_project_ids.uniq
- end
-
- def token_in_session
- @params[:token_in_session]
- end
end
end
diff --git a/app/services/google_cloud/gcp_region_add_or_replace_service.rb b/app/services/google_cloud/gcp_region_add_or_replace_service.rb
index 467f818bcc7..f79df707a08 100644
--- a/app/services/google_cloud/gcp_region_add_or_replace_service.rb
+++ b/app/services/google_cloud/gcp_region_add_or_replace_service.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
module GoogleCloud
- class GcpRegionAddOrReplaceService < ::BaseService
+ class GcpRegionAddOrReplaceService < ::GoogleCloud::BaseService
def execute(environment, region)
- gcp_region_key = Projects::GoogleCloudController::GCP_REGION_CI_VAR_KEY
+ gcp_region_key = Projects::GoogleCloud::GcpRegionsController::GCP_REGION_CI_VAR_KEY
change_params = { variable_params: { key: gcp_region_key, value: region, environment_scope: environment } }
filter_params = { key: gcp_region_key, filter: { environment_scope: environment } }
diff --git a/app/services/google_cloud/generate_pipeline_service.rb b/app/services/google_cloud/generate_pipeline_service.rb
index 077f815e60c..be0c7a783c9 100644
--- a/app/services/google_cloud/generate_pipeline_service.rb
+++ b/app/services/google_cloud/generate_pipeline_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module GoogleCloud
- class GeneratePipelineService < :: BaseService
+ class GeneratePipelineService < ::GoogleCloud::BaseService
ACTION_DEPLOY_TO_CLOUD_RUN = 'DEPLOY_TO_CLOUD_RUN'
ACTION_DEPLOY_TO_CLOUD_STORAGE = 'DEPLOY_TO_CLOUD_STORAGE'
diff --git a/app/services/google_cloud/service_accounts_service.rb b/app/services/google_cloud/service_accounts_service.rb
index b791f07cd65..e90fd112e2e 100644
--- a/app/services/google_cloud/service_accounts_service.rb
+++ b/app/services/google_cloud/service_accounts_service.rb
@@ -8,7 +8,7 @@ module GoogleCloud
##
# This service deals with GCP Service Accounts in GitLab
- class ServiceAccountsService < ::BaseService
+ class ServiceAccountsService < ::GoogleCloud::BaseService
##
# Find GCP Service Accounts in a GitLab project
#
@@ -17,7 +17,7 @@ module GoogleCloud
# aligning GitLab project and ref to GCP projects
def find_for_project
- group_vars_by_ref.map do |environment_scope, value|
+ group_vars_by_environment(GCP_KEYS).map do |environment_scope, value|
{
ref: environment_scope,
gcp_project: value['GCP_PROJECT_ID'],
@@ -28,50 +28,24 @@ module GoogleCloud
end
def add_for_project(ref, gcp_project_id, service_account, service_account_key, is_protected)
- project_var_create_or_replace(
+ create_or_replace_project_vars(
ref,
'GCP_PROJECT_ID',
gcp_project_id,
is_protected
)
- project_var_create_or_replace(
+ create_or_replace_project_vars(
ref,
'GCP_SERVICE_ACCOUNT',
service_account,
is_protected
)
- project_var_create_or_replace(
+ create_or_replace_project_vars(
ref,
'GCP_SERVICE_ACCOUNT_KEY',
service_account_key,
is_protected
)
end
-
- private
-
- def group_vars_by_ref
- filtered_vars = project.variables.filter { |variable| GCP_KEYS.include? variable.key }
- filtered_vars.each_with_object({}) do |variable, grouped|
- grouped[variable.environment_scope] ||= {}
- grouped[variable.environment_scope][variable.key] = variable.value
- end
- end
-
- def project_var_create_or_replace(environment_scope, key, value, is_protected)
- change_params = { variable_params: { key: key, value: value, environment_scope: environment_scope, protected: is_protected } }
- filter_params = { key: key, filter: { environment_scope: environment_scope } }
-
- existing_variable = ::Ci::VariablesFinder.new(project, filter_params).execute.first
-
- if existing_variable
- change_params[:action] = :update
- change_params[:variable] = existing_variable
- else
- change_params[:action] = :create
- end
-
- ::Ci::ChangeVariableService.new(container: project, current_user: current_user, params: change_params).execute
- end
end
end
diff --git a/app/services/google_cloud/setup_cloudsql_instance_service.rb b/app/services/google_cloud/setup_cloudsql_instance_service.rb
new file mode 100644
index 00000000000..73650ee752f
--- /dev/null
+++ b/app/services/google_cloud/setup_cloudsql_instance_service.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module GoogleCloud
+ class SetupCloudsqlInstanceService < ::GoogleCloud::BaseService
+ INSTANCE_STATE_RUNNABLE = 'RUNNABLE'
+ OPERATION_STATE_DONE = 'DONE'
+ DEFAULT_DATABASE_NAME = 'main_db'
+ DEFAULT_DATABASE_USER = 'main_user'
+
+ def execute
+ return error('Unauthorized user') unless Ability.allowed?(current_user, :admin_project_google_cloud, project)
+
+ get_instance_response = google_api_client.get_cloudsql_instance(gcp_project_id, instance_name)
+
+ if get_instance_response.state != INSTANCE_STATE_RUNNABLE
+ return error("CloudSQL instance not RUNNABLE: #{get_instance_response.to_json}")
+ end
+
+ database_response = google_api_client.create_cloudsql_database(gcp_project_id, instance_name, database_name)
+
+ if database_response.status != OPERATION_STATE_DONE
+ return error("Database creation failed: #{database_response.to_json}")
+ end
+
+ user_response = google_api_client.create_cloudsql_user(gcp_project_id, instance_name, username, password)
+
+ if user_response.status != OPERATION_STATE_DONE
+ return error("User creation failed: #{user_response.to_json}")
+ end
+
+ primary_ip_address = get_instance_response.ip_addresses.first.ip_address
+ connection_name = get_instance_response.connection_name
+
+ save_ci_var('GCP_PROJECT_ID', gcp_project_id)
+ save_ci_var('GCP_CLOUDSQL_INSTANCE_NAME', instance_name)
+ save_ci_var('GCP_CLOUDSQL_CONNECTION_NAME', connection_name)
+ save_ci_var('GCP_CLOUDSQL_PRIMARY_IP_ADDRESS', primary_ip_address)
+ save_ci_var('GCP_CLOUDSQL_VERSION', database_version)
+ save_ci_var('GCP_CLOUDSQL_DATABASE_NAME', database_name)
+ save_ci_var('GCP_CLOUDSQL_DATABASE_USER', username)
+ save_ci_var('GCP_CLOUDSQL_DATABASE_PASS', password, true)
+
+ success
+ rescue Google::Apis::Error => err
+ error(message: err.to_json)
+ end
+
+ private
+
+ def instance_name
+ @params[:instance_name]
+ end
+
+ def database_version
+ @params[:database_version]
+ end
+
+ def database_name
+ @params.fetch(:database_name, DEFAULT_DATABASE_NAME)
+ end
+
+ def username
+ @params.fetch(:username, DEFAULT_DATABASE_USER)
+ end
+
+ def password
+ SecureRandom.hex(16)
+ end
+
+ def save_ci_var(key, value, is_masked = false)
+ create_or_replace_project_vars(environment_name, key, value, @params[:is_protected], is_masked)
+ end
+ end
+end
diff --git a/app/services/gravatar_service.rb b/app/services/gravatar_service.rb
index a689b088854..9d5990f2c8a 100644
--- a/app/services/gravatar_service.rb
+++ b/app/services/gravatar_service.rb
@@ -2,6 +2,7 @@
class GravatarService
def execute(email, size = nil, scale = 2, username: nil)
+ return if Gitlab::FIPS.enabled?
return unless Gitlab::CurrentSettings.gravatar_enabled?
identifier = email.presence || username.presence
diff --git a/app/services/groups/base_service.rb b/app/services/groups/base_service.rb
index 06136aff50e..9705f3a560d 100644
--- a/app/services/groups/base_service.rb
+++ b/app/services/groups/base_service.rb
@@ -13,11 +13,11 @@ module Groups
private
def handle_namespace_settings
- settings_params = params.slice(*::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS)
+ settings_params = params.slice(*::NamespaceSetting.allowed_namespace_settings_params)
return if settings_params.empty?
- ::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS.each do |nsp|
+ ::NamespaceSetting.allowed_namespace_settings_params.each do |nsp|
params.delete(nsp)
end
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index 639f7c68c40..35716f7742a 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -13,7 +13,7 @@ module Groups
remove_unallowed_params
set_visibility_level
- @group = Group.new(params.except(*::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS))
+ @group = Group.new(params.except(*::NamespaceSetting.allowed_namespace_settings_params))
@group.build_namespace_settings
handle_namespace_settings
diff --git a/app/services/groups/group_links/destroy_service.rb b/app/services/groups/group_links/destroy_service.rb
index 0e7fd7e0817..4d74b5f32e2 100644
--- a/app/services/groups/group_links/destroy_service.rb
+++ b/app/services/groups/group_links/destroy_service.rb
@@ -16,6 +16,8 @@ module Groups
groups_to_refresh = links.map(&:shared_with_group)
groups_to_refresh.uniq.each do |group|
+ next if Feature.enabled?(:skip_group_share_unlink_auth_refresh, group.root_ancestor)
+
group.refresh_members_authorized_projects(blocking: false, direct_members_only: true)
end
else
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index a0021ae2ccb..29e3a9473ab 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -162,6 +162,12 @@ module Groups
projects_to_update
.update_all(visibility_level: @new_parent_group.visibility_level)
+
+ update_project_settings(@updated_project_ids)
+ end
+
+ # Overridden in EE
+ def update_project_settings(updated_project_ids)
end
def update_two_factor_authentication
diff --git a/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb b/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb
index b7f8b268f18..d11492e062a 100644
--- a/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb
+++ b/app/services/incident_management/issuable_escalation_statuses/after_update_service.rb
@@ -6,7 +6,6 @@ module IncidentManagement
def initialize(issuable, current_user, **params)
@issuable = issuable
@escalation_status = issuable.escalation_status
- @alert = issuable.alert_management_alert
super(project: issuable.project, current_user: current_user, params: params)
end
@@ -19,29 +18,24 @@ module IncidentManagement
private
- attr_reader :issuable, :escalation_status, :alert
+ attr_reader :issuable, :escalation_status
def after_update
- sync_status_to_alert
add_status_system_note
+ add_timeline_event
end
- def sync_status_to_alert
- return unless alert
- return if alert.status == escalation_status.status
+ def add_status_system_note
+ return unless escalation_status.status_previously_changed?
- ::AlertManagement::Alerts::UpdateService.new(
- alert,
- current_user,
- status: escalation_status.status_name,
- status_change_reason: " by changing the incident status of #{issuable.to_reference(project)}"
- ).execute
+ SystemNoteService.change_incident_status(issuable, current_user, params[:status_change_reason])
end
- def add_status_system_note
+ def add_timeline_event
return unless escalation_status.status_previously_changed?
- SystemNoteService.change_incident_status(issuable, current_user, params[:status_change_reason])
+ IncidentManagement::TimelineEvents::CreateService
+ .change_incident_status(issuable, current_user, escalation_status)
end
end
end
diff --git a/app/services/incident_management/issuable_escalation_statuses/build_service.rb b/app/services/incident_management/issuable_escalation_statuses/build_service.rb
index 9ebcf72a0c9..b3c57da03cb 100644
--- a/app/services/incident_management/issuable_escalation_statuses/build_service.rb
+++ b/app/services/incident_management/issuable_escalation_statuses/build_service.rb
@@ -5,30 +5,17 @@ module IncidentManagement
class BuildService < ::BaseProjectService
def initialize(issue)
@issue = issue
- @alert = issue.alert_management_alert
super(project: issue.project)
end
def execute
- return issue.escalation_status if issue.escalation_status
-
- issue.build_incident_management_issuable_escalation_status(alert_params)
+ issue.escalation_status || issue.build_incident_management_issuable_escalation_status
end
private
- attr_reader :issue, :alert
-
- def alert_params
- return {} unless alert
-
- {
- status_event: alert.status_event_for(alert.status_name)
- }
- end
+ attr_reader :issue
end
end
end
-
-IncidentManagement::IssuableEscalationStatuses::BuildService.prepend_mod
diff --git a/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb b/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb
index 1d0504a6e80..58777848151 100644
--- a/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb
+++ b/app/services/incident_management/issuable_escalation_statuses/prepare_update_service.rb
@@ -5,7 +5,7 @@ module IncidentManagement
class PrepareUpdateService < ::BaseProjectService
include Gitlab::Utils::StrongMemoize
- SUPPORTED_PARAMS = %i[status status_change_reason].freeze
+ SUPPORTED_PARAMS = %i[status].freeze
def initialize(issuable, current_user, params)
@issuable = issuable
diff --git a/app/services/incident_management/timeline_events/create_service.rb b/app/services/incident_management/timeline_events/create_service.rb
index 5e5feed65c2..3cb67ccf2b1 100644
--- a/app/services/incident_management/timeline_events/create_service.rb
+++ b/app/services/incident_management/timeline_events/create_service.rb
@@ -4,6 +4,7 @@ module IncidentManagement
module TimelineEvents
DEFAULT_ACTION = 'comment'
DEFAULT_EDITABLE = false
+ DEFAULT_AUTO_CREATED = false
class CreateService < TimelineEvents::BaseService
def initialize(incident, user, params)
@@ -11,6 +12,42 @@ module IncidentManagement
@incident = incident
@user = user
@params = params
+ @auto_created = !!params.fetch(:auto_created, DEFAULT_AUTO_CREATED)
+ end
+
+ class << self
+ def create_incident(incident, user)
+ note = "@#{user.username} created the incident"
+ occurred_at = incident.created_at
+ action = 'issues'
+
+ new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
+ end
+
+ def reopen_incident(incident, user)
+ note = "@#{user.username} reopened the incident"
+ occurred_at = incident.updated_at
+ action = 'issues'
+
+ new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
+ end
+
+ def resolve_incident(incident, user)
+ note = "@#{user.username} resolved the incident"
+ occurred_at = incident.updated_at
+ action = 'status'
+
+ new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
+ end
+
+ def change_incident_status(incident, user, escalation_status)
+ status = escalation_status.status_name.to_s.titleize
+ note = "@#{user.username} changed the incident status to **#{status}**"
+ occurred_at = incident.updated_at
+ action = 'status'
+
+ new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
+ end
end
def execute
@@ -32,8 +69,8 @@ module IncidentManagement
if timeline_event.save
add_system_note(timeline_event)
-
track_usage_event(:incident_management_timeline_event_created, user.id)
+
success(timeline_event)
else
error_in_save(timeline_event)
@@ -42,9 +79,16 @@ module IncidentManagement
private
- attr_reader :project, :user, :incident, :params
+ attr_reader :project, :user, :incident, :params, :auto_created
+
+ def allowed?
+ return true if auto_created
+
+ super
+ end
def add_system_note(timeline_event)
+ return if auto_created
return unless Feature.enabled?(:incident_timeline, project)
SystemNoteService.add_timeline_event(timeline_event)
diff --git a/app/services/incident_management/timeline_events/update_service.rb b/app/services/incident_management/timeline_events/update_service.rb
index 83497b123dd..8217c8125c2 100644
--- a/app/services/incident_management/timeline_events/update_service.rb
+++ b/app/services/incident_management/timeline_events/update_service.rb
@@ -17,7 +17,6 @@ module IncidentManagement
end
def execute
- return error_non_editable unless timeline_event.editable?
return error_no_permissions unless allowed?
if timeline_event.update(update_params)
@@ -59,8 +58,8 @@ module IncidentManagement
:none
end
- def error_non_editable
- error(_('You cannot edit this timeline event.'))
+ def allowed?
+ user&.can?(:edit_incident_management_timeline_event, timeline_event)
end
end
end
diff --git a/app/services/issuable/clone/attributes_rewriter.rb b/app/services/issuable/clone/attributes_rewriter.rb
deleted file mode 100644
index 279d3051848..00000000000
--- a/app/services/issuable/clone/attributes_rewriter.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-module Issuable
- module Clone
- class AttributesRewriter < ::Issuable::Clone::BaseService
- def initialize(current_user, original_entity, new_entity)
- @current_user = current_user
- @original_entity = original_entity
- @new_entity = new_entity
- end
-
- def execute
- update_attributes = { labels: cloneable_labels }
-
- milestone = matching_milestone(original_entity.milestone&.title)
- update_attributes[:milestone] = milestone if milestone.present?
-
- new_entity.update(update_attributes)
-
- copy_resource_label_events
- copy_resource_milestone_events
- copy_resource_state_events
- end
-
- private
-
- def matching_milestone(title)
- return if title.blank? || !new_entity.supports_milestone?
-
- params = { title: title, project_ids: new_entity.project&.id, group_ids: group&.id }
-
- milestones = MilestonesFinder.new(params).execute
- milestones.first
- end
-
- def cloneable_labels
- params = {
- project_id: new_entity.project&.id,
- group_id: group&.id,
- title: original_entity.labels.select(:title),
- include_ancestor_groups: true
- }
-
- params[:only_group_labels] = true if new_parent.is_a?(Group)
-
- LabelsFinder.new(current_user, params).execute
- end
-
- def copy_resource_label_events
- copy_events(ResourceLabelEvent.table_name, original_entity.resource_label_events) do |event|
- event.attributes
- .except('id', 'reference', 'reference_html')
- .merge(entity_key => new_entity.id, 'action' => ResourceLabelEvent.actions[event.action])
- end
- end
-
- def copy_resource_milestone_events
- return unless milestone_events_supported?
-
- copy_events(ResourceMilestoneEvent.table_name, original_entity.resource_milestone_events) do |event|
- if event.remove?
- event_attributes_with_milestone(event, nil)
- else
- matching_destination_milestone = matching_milestone(event.milestone_title)
-
- event_attributes_with_milestone(event, matching_destination_milestone) if matching_destination_milestone.present?
- end
- end
- end
-
- def copy_resource_state_events
- return unless state_events_supported?
-
- copy_events(ResourceStateEvent.table_name, original_entity.resource_state_events) do |event|
- event.attributes
- .except(*blocked_state_event_attributes)
- .merge(entity_key => new_entity.id,
- 'state' => ResourceStateEvent.states[event.state])
- end
- end
-
- # Overriden on EE::Issuable::Clone::AttributesRewriter
- def blocked_state_event_attributes
- ['id']
- end
-
- def event_attributes_with_milestone(event, milestone)
- event.attributes
- .except('id')
- .merge(entity_key => new_entity.id,
- 'milestone_id' => milestone&.id,
- 'action' => ResourceMilestoneEvent.actions[event.action],
- 'state' => ResourceMilestoneEvent.states[event.state])
- end
-
- def copy_events(table_name, events_to_copy)
- events_to_copy.find_in_batches do |batch|
- events = batch.map do |event|
- yield(event)
- end.compact
-
- ApplicationRecord.legacy_bulk_insert(table_name, events) # rubocop:disable Gitlab/BulkInsert
- end
- end
-
- def entity_key
- new_entity.class.name.underscore.foreign_key
- end
-
- def milestone_events_supported?
- both_respond_to?(:resource_milestone_events)
- end
-
- def state_events_supported?
- both_respond_to?(:resource_state_events)
- end
-
- def both_respond_to?(method)
- original_entity.respond_to?(method) &&
- new_entity.respond_to?(method)
- end
- end
- end
-end
-
-Issuable::Clone::AttributesRewriter.prepend_mod_with('Issuable::Clone::AttributesRewriter')
diff --git a/app/services/issuable/clone/base_service.rb b/app/services/issuable/clone/base_service.rb
index ce9918a4b56..98c50347719 100644
--- a/app/services/issuable/clone/base_service.rb
+++ b/app/services/issuable/clone/base_service.rb
@@ -3,13 +3,13 @@
module Issuable
module Clone
class BaseService < IssuableBaseService
- attr_reader :original_entity, :new_entity, :target_project
+ attr_reader :original_entity, :new_entity
alias_method :old_project, :project
- def execute(original_entity, target_project = nil)
+ def execute(original_entity, target_parent)
@original_entity = original_entity
- @target_project = target_project
+ @target_parent = target_parent
# Using transaction because of a high resources footprint
# on rewriting notes (unfolding references)
@@ -25,19 +25,21 @@ module Issuable
private
- def copy_award_emoji
- AwardEmojis::CopyService.new(original_entity, new_entity).execute
- end
+ attr_reader :target_parent
- def copy_notes
- Notes::CopyService.new(current_user, original_entity, new_entity).execute
+ def rewritten_old_entity_attributes(include_milestone: true)
+ Gitlab::Issuable::Clone::AttributesRewriter.new(
+ current_user,
+ original_entity,
+ target_parent
+ ).execute(include_milestone: include_milestone)
end
def update_new_entity
update_new_entity_description
- update_new_entity_attributes
copy_award_emoji
copy_notes
+ copy_resource_events
end
def update_new_entity_description
@@ -52,8 +54,16 @@ module Issuable
new_entity.update!(update_description_params)
end
- def update_new_entity_attributes
- AttributesRewriter.new(current_user, original_entity, new_entity).execute
+ def copy_award_emoji
+ AwardEmojis::CopyService.new(original_entity, new_entity).execute
+ end
+
+ def copy_notes
+ Notes::CopyService.new(current_user, original_entity, new_entity).execute
+ end
+
+ def copy_resource_events
+ Gitlab::Issuable::Clone::CopyResourceEventsService.new(current_user, original_entity, new_entity).execute
end
def update_old_entity
@@ -74,14 +84,8 @@ module Issuable
new_entity.resource_parent
end
- def group
- if new_entity.project&.group && current_user.can?(:read_group, new_entity.project.group)
- new_entity.project.group
- end
- end
-
def relative_position
- return if original_entity.project.root_ancestor.id != target_project.root_ancestor.id
+ return if original_entity.project.root_ancestor.id != target_parent.root_ancestor.id
original_entity.relative_position
end
diff --git a/app/services/issuable/import_csv/base_service.rb b/app/services/issuable/import_csv/base_service.rb
index 4a2078a4e60..9b41c88159f 100644
--- a/app/services/issuable/import_csv/base_service.rb
+++ b/app/services/issuable/import_csv/base_service.rb
@@ -23,7 +23,8 @@ module Issuable
with_csv_lines.each do |row, line_no|
issuable_attributes = {
title: row[:title],
- description: row[:description]
+ description: row[:description],
+ due_date: row[:due_date]
}
if create_issuable(issuable_attributes).persisted?
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 03115416607..acd6d45af7a 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -162,8 +162,6 @@ class IssuableBaseService < ::BaseProjectService
return unless result.success? && result[:escalation_status].present?
- @escalation_status_change_reason = result[:escalation_status].delete(:status_change_reason)
-
params[:incident_management_issuable_escalation_status_attributes] = result[:escalation_status]
end
@@ -231,7 +229,7 @@ class IssuableBaseService < ::BaseProjectService
before_create(issuable)
issuable_saved = issuable.with_transaction_returning_status do
- issuable.save
+ transaction_create(issuable)
end
if issuable_saved
@@ -282,8 +280,9 @@ class IssuableBaseService < ::BaseProjectService
assign_requested_labels(issuable)
assign_requested_assignees(issuable)
assign_requested_crm_contacts(issuable)
+ widget_params = filter_widget_params
- if issuable.changed? || params.present?
+ if issuable.changed? || params.present? || widget_params.present?
issuable.assign_attributes(allowed_update_params(params))
if has_title_or_description_changed?(issuable)
@@ -303,7 +302,7 @@ class IssuableBaseService < ::BaseProjectService
ensure_milestone_available(issuable)
issuable_saved = issuable.with_transaction_returning_status do
- issuable.save(touch: should_touch)
+ transaction_update(issuable, { save_with_touch: should_touch })
end
if issuable_saved
@@ -332,6 +331,16 @@ class IssuableBaseService < ::BaseProjectService
issuable
end
+ def transaction_update(issuable, opts = {})
+ touch = opts[:save_with_touch] || false
+
+ issuable.save(touch: touch)
+ end
+
+ def transaction_create(issuable)
+ issuable.save
+ end
+
def update_task(issuable)
filter_params(issuable)
@@ -590,6 +599,10 @@ class IssuableBaseService < ::BaseProjectService
issuable_sla.update(issuable_closed: issuable.closed?)
end
+
+ def filter_widget_params
+ params.delete(:widget_params)
+ end
end
IssuableBaseService.prepend_mod_with('IssuableBaseService')
diff --git a/app/services/issuable_links/create_service.rb b/app/services/issuable_links/create_service.rb
index 0887f04760c..aca98596a02 100644
--- a/app/services/issuable_links/create_service.rb
+++ b/app/services/issuable_links/create_service.rb
@@ -8,6 +8,7 @@ module IssuableLinks
@issuable = issuable
@current_user = user
@params = params.dup
+ @errors = []
end
def execute
@@ -22,7 +23,6 @@ module IssuableLinks
return error(issuables_not_found_message, 404)
end
- @errors = []
references = create_links
if @errors.present?
diff --git a/app/services/issues/build_service.rb b/app/services/issues/build_service.rb
index 1ebf9bb6ba2..75bd2b88e86 100644
--- a/app/services/issues/build_service.rb
+++ b/app/services/issues/build_service.rb
@@ -81,8 +81,9 @@ module Issues
::Issue
end
- def allowed_issue_params
- allowed_params = [
+ def public_params
+ # Additional params may be assigned later (in a CreateService for example)
+ public_issue_params = [
:title,
:description,
:confidential
@@ -90,17 +91,17 @@ module Issues
params[:work_item_type] = WorkItems::Type.find_by(id: params[:work_item_type_id]) if params[:work_item_type_id].present? # rubocop: disable CodeReuse/ActiveRecord
- allowed_params << :milestone_id if can?(current_user, :admin_issue, project)
- allowed_params << :issue_type if create_issue_type_allowed?(project, params[:issue_type])
- allowed_params << :work_item_type if create_issue_type_allowed?(project, params[:work_item_type]&.base_type)
+ public_issue_params << :milestone_id if can?(current_user, :admin_issue, project)
+ public_issue_params << :issue_type if create_issue_type_allowed?(project, params[:issue_type])
+ public_issue_params << :work_item_type if create_issue_type_allowed?(project, params[:work_item_type]&.base_type)
- params.slice(*allowed_params)
+ params.slice(*public_issue_params)
end
def build_issue_params
{ author: current_user }
.merge(issue_params_with_info_from_discussions)
- .merge(allowed_issue_params)
+ .merge(public_params)
.with_indifferent_access
end
diff --git a/app/services/issues/clone_service.rb b/app/services/issues/clone_service.rb
index c675f957cd7..896b15a14b8 100644
--- a/app/services/issues/clone_service.rb
+++ b/app/services/issues/clone_service.rb
@@ -41,9 +41,12 @@ module Issues
def update_new_entity
# we don't call `super` because we want to be able to decide whether or not to copy all comments over.
update_new_entity_description
- update_new_entity_attributes
copy_award_emoji
- copy_notes if with_notes
+
+ if with_notes
+ copy_notes
+ copy_resource_events
+ end
end
def update_old_entity
@@ -62,14 +65,18 @@ module Issues
}
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
+ new_params = new_params.merge(rewritten_old_entity_attributes)
+ new_params.delete(:created_at)
+ new_params.delete(:updated_at)
# spam checking is not necessary, as no new content is being created. Passing nil for
# spam_params will cause SpamActionService to skip checking and return a success response.
spam_params = nil
- # Skip creation of system notes for existing attributes of the issue. The system notes of the old
- # issue are copied over so we don't want to end up with duplicate notes.
- CreateService.new(project: target_project, current_user: current_user, params: new_params, spam_params: spam_params).execute(skip_system_notes: true)
+ # Skip creation of system notes for existing attributes of the issue when cloning with notes.
+ # The system notes of the old issue are copied over so we don't want to end up with duplicate notes.
+ # When cloning without notes, we want to generate system notes for the attributes that were copied.
+ CreateService.new(project: target_project, current_user: current_user, params: new_params, spam_params: spam_params).execute(skip_system_notes: with_notes)
end
def queue_copy_designs
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index ff45091c7e6..d08e4d12a92 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -97,7 +97,10 @@ module Issues
status = issue.incident_management_issuable_escalation_status || issue.build_incident_management_issuable_escalation_status
- SystemNoteService.change_incident_status(issue, current_user, ' by closing the incident') if status.resolve
+ return unless status.resolve
+
+ SystemNoteService.change_incident_status(issue, current_user, ' by closing the incident')
+ IncidentManagement::TimelineEvents::CreateService.resolve_incident(issue, current_user)
end
def store_first_mentioned_in_commit_at(issue, merge_request, max_commit_lookup: 100)
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index edf6d75b632..30d4cb68840 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -13,6 +13,7 @@ module Issues
# in the caller (for example, an issue created via email) and the required arguments to the
# SpamParams constructor are not otherwise available, spam_params: must be explicitly passed as nil.
def initialize(project:, current_user: nil, params: {}, spam_params:, build_service: nil)
+ @extra_params = params.delete(:extra_params) || {}
super(project: project, current_user: current_user, params: params)
@spam_params = spam_params
@build_service = build_service || BuildService.new(project: project, current_user: current_user, params: params)
@@ -46,7 +47,7 @@ module Issues
issue.run_after_commit do
NewIssueWorker.perform_async(issue.id, user.id)
Issues::PlacementWorker.perform_async(nil, issue.project_id)
- Namespaces::OnboardingIssueCreatedWorker.perform_async(issue.namespace.id)
+ Namespaces::OnboardingIssueCreatedWorker.perform_async(issue.project.namespace_id)
end
end
@@ -56,7 +57,8 @@ module Issues
handle_add_related_issue(issue)
resolve_discussions_with_issue(issue)
create_escalation_status(issue)
- try_to_associate_contact(issue)
+ create_timeline_event(issue)
+ try_to_associate_contacts(issue)
super
end
@@ -85,12 +87,18 @@ module Issues
private
- attr_reader :spam_params
+ attr_reader :spam_params, :extra_params
def create_escalation_status(issue)
::IncidentManagement::IssuableEscalationStatuses::CreateService.new(issue).execute if issue.supports_escalation?
end
+ def create_timeline_event(issue)
+ return unless issue.incident?
+
+ IncidentManagement::TimelineEvents::CreateService.create_incident(issue, current_user)
+ end
+
def user_agent_detail_service
UserAgentDetailService.new(spammable: @issue, spam_params: spam_params)
end
@@ -101,11 +109,14 @@ module Issues
IssueLinks::CreateService.new(issue, issue.author, { target_issuable: @add_related_issue }).execute
end
- def try_to_associate_contact(issue)
+ def try_to_associate_contacts(issue)
return unless issue.external_author
return unless current_user.can?(:set_issue_crm_contacts, issue)
- set_crm_contacts(issue, [issue.external_author])
+ contacts = [issue.external_author]
+ contacts.concat extra_params[:cc] unless extra_params[:cc].nil?
+
+ set_crm_contacts(issue, contacts)
end
end
end
diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb
index d210ba2a76c..edab62b1fdf 100644
--- a/app/services/issues/move_service.rb
+++ b/app/services/issues/move_service.rb
@@ -76,6 +76,7 @@ module Issues
}
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
+ new_params = new_params.merge(rewritten_old_entity_attributes)
# spam checking is not necessary, as no new content is being created. Passing nil for
# spam_params will cause SpamActionService to skip checking and return a success response.
spam_params = nil
diff --git a/app/services/issues/related_branches_service.rb b/app/services/issues/related_branches_service.rb
index 8b08c1f8ddb..2ecd3e561c9 100644
--- a/app/services/issues/related_branches_service.rb
+++ b/app/services/issues/related_branches_service.rb
@@ -5,26 +5,23 @@
module Issues
class RelatedBranchesService < Issues::BaseService
def execute(issue)
- branch_names = branches_with_iid_of(issue) - branches_with_merge_request_for(issue)
- branch_names.map { |branch_name| branch_data(branch_name) }
+ branch_names_with_mrs = branches_with_merge_request_for(issue)
+ branches = branches_with_iid_of(issue).reject { |b| branch_names_with_mrs.include?(b[:name]) }
+
+ branches.map { |branch| branch_data(branch) }
end
private
- def branch_data(branch_name)
+ def branch_data(branch)
{
- name: branch_name,
- pipeline_status: pipeline_status(branch_name)
+ name: branch[:name],
+ pipeline_status: pipeline_status(branch)
}
end
- def pipeline_status(branch_name)
- branch = project.repository.find_branch(branch_name)
- target = branch&.dereferenced_target
-
- return unless target
-
- pipeline = project.latest_pipeline(branch_name, target.sha)
+ def pipeline_status(branch)
+ pipeline = project.latest_pipeline(branch[:name], branch[:target])
pipeline.detailed_status(current_user) if can?(current_user, :read_pipeline, pipeline)
end
@@ -36,8 +33,16 @@ module Issues
end
def branches_with_iid_of(issue)
- project.repository.branch_names.select do |branch|
- branch =~ /\A#{issue.iid}-(?!\d+-stable)/i
+ branch_ref_regex = /\A#{Gitlab::Git::BRANCH_REF_PREFIX}#{issue.iid}-(?!\d+-stable)/i
+
+ return [] unless project.repository.exists?
+
+ project.repository.list_refs(
+ [Gitlab::Git::BRANCH_REF_PREFIX + "#{issue.iid}-*"]
+ ).each_with_object([]) do |ref, results|
+ if ref.name.match?(branch_ref_regex)
+ results << { name: ref.name.delete_prefix(Gitlab::Git::BRANCH_REF_PREFIX), target: ref.target }
+ end
end
end
end
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index 4abd1dfbf4e..e003ecacb3f 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -32,11 +32,18 @@ module Issues
end
def perform_incident_management_actions(issue)
+ return unless issue.incident?
+
+ create_timeline_event(issue)
end
def create_note(issue, state = issue.state)
SystemNoteService.change_status(issue, issue.project, current_user, state, nil)
end
+
+ def create_timeline_event(issue)
+ IncidentManagement::TimelineEvents::CreateService.reopen_incident(issue, current_user)
+ end
end
end
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index d9210169005..afc61eed287 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -199,8 +199,7 @@ module Issues
::IncidentManagement::IssuableEscalationStatuses::AfterUpdateService.new(
issue,
- current_user,
- status_change_reason: @escalation_status_change_reason # Defined in IssuableBaseService before save
+ current_user
).execute
end
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index 57d9da4cefd..38bebc1d09d 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -84,7 +84,7 @@ module Members
end
def add_members
- @members = source.add_users(
+ @members = source.add_members(
invites,
params[:access_level],
expires_at: params[:expires_at],
diff --git a/app/services/members/creator_service.rb b/app/services/members/creator_service.rb
index 276093a00a9..f59a3ed77eb 100644
--- a/app/services/members/creator_service.rb
+++ b/app/services/members/creator_service.rb
@@ -13,9 +13,9 @@ module Members
Gitlab::Access.sym_options_with_owner
end
- def add_users( # rubocop:disable Metrics/ParameterLists
+ def add_members( # rubocop:disable Metrics/ParameterLists
source,
- users,
+ invitees,
access_level,
current_user: nil,
expires_at: nil,
@@ -24,41 +24,49 @@ module Members
ldap: nil,
blocking_refresh: nil
)
- return [] unless users.present?
+ return [] unless invitees.present?
# If this user is attempting to manage Owner members and doesn't have permission, do not allow
return [] if managing_owners?(current_user, access_level) && cannot_manage_owners?(source, current_user)
- emails, users, existing_members = parse_users_list(source, users)
+ emails, users, existing_members = parse_users_list(source, invitees)
Member.transaction do
- (emails + users).map! do |user|
- new(source,
- user,
- access_level,
- existing_members: existing_members,
- current_user: current_user,
- expires_at: expires_at,
- tasks_to_be_done: tasks_to_be_done,
- tasks_project_id: tasks_project_id,
- ldap: ldap,
- blocking_refresh: blocking_refresh)
- .execute
+ common_arguments = {
+ source: source,
+ access_level: access_level,
+ existing_members: existing_members,
+ current_user: current_user,
+ expires_at: expires_at,
+ tasks_to_be_done: tasks_to_be_done,
+ tasks_project_id: tasks_project_id,
+ ldap: ldap,
+ blocking_refresh: blocking_refresh
+ }
+
+ members = emails.map do |email|
+ new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute
end
+
+ members += users.map do |user|
+ new(invitee: user, **common_arguments).execute
+ end
+
+ members
end
end
- def add_user( # rubocop:disable Metrics/ParameterLists
+ def add_member( # rubocop:disable Metrics/ParameterLists
source,
- user,
+ invitee,
access_level,
current_user: nil,
expires_at: nil,
ldap: nil,
blocking_refresh: nil
)
- add_users(source,
- [user],
+ add_members(source,
+ [invitee],
access_level,
current_user: current_user,
expires_at: expires_at,
@@ -113,11 +121,11 @@ module Members
end
end
- def initialize(source, user, access_level, **args)
- @source = source
- @user = user
- @access_level = self.class.parsed_access_level(access_level)
+ def initialize(invitee:, builder: StandardMemberBuilder, **args)
+ @invitee = invitee
+ @builder = builder
@args = args
+ @access_level = self.class.parsed_access_level(args[:access_level])
end
private_class_method :new
@@ -133,7 +141,7 @@ module Members
private
delegate :new_record?, to: :member
- attr_reader :source, :user, :access_level, :member, :args
+ attr_reader :invitee, :access_level, :member, :args, :builder
def assign_member_attributes
member.attributes = member_attributes
@@ -170,7 +178,7 @@ module Members
# Populates the attributes of a member.
#
# This logic resides in a separate method so that EE can extend this logic,
- # without having to patch the `add_user` method directly.
+ # without having to patch the `add_members` method directly.
def member_attributes
{
created_by: member.created_by || current_user,
@@ -182,14 +190,14 @@ module Members
def commit_changes
if member.request?
approve_request
- else
+ elsif member.changed?
# Calling #save triggers callbacks even if there is no change on object.
# This previously caused an incident due to the hard to predict
# behaviour caused by the large number of callbacks.
# See https://gitlab.com/gitlab-com/gl-infra/production/-/issues/6351
# and https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80920#note_911569038
# for details.
- member.save if member.changed?
+ member.save
end
end
@@ -241,43 +249,19 @@ module Members
end
def find_or_build_member
- @user = parse_user_param
-
- @member = if user.is_a?(User)
- find_or_initialize_member_by_user
- else
- source.members.build(invite_email: user)
- end
+ @member = builder.new(source, invitee, existing_members).execute
@member.blocking_refresh = args[:blocking_refresh]
end
- # This method is used to find users that have been entered into the "Add members" field.
- # These can be the User objects directly, their IDs, their emails, or new emails to be invited.
- def parse_user_param
- case user
- when User
- user
- when Integer
- # might not return anything - this needs enhancement
- User.find_by(id: user) # rubocop:todo CodeReuse/ActiveRecord
- else
- # must be an email or at least we'll consider it one
- source.users_by_emails([user])[user] || user
- end
- end
-
- def find_or_initialize_member_by_user
- # We have to use `members_and_requesters` here since the given `members` is modified in the models
- # to act more like a scope(removing the requested_at members) and therefore ActiveRecord has issues with that
- # on build and refreshing that relation.
- existing_members[user.id] || source.members_and_requesters.build(user_id: user.id) # rubocop:disable CodeReuse/ActiveRecord
- end
-
def ldap
args[:ldap] || false
end
+ def source
+ args[:source]
+ end
+
def existing_members
args[:existing_members] || {}
end
diff --git a/app/services/members/invite_member_builder.rb b/app/services/members/invite_member_builder.rb
new file mode 100644
index 00000000000..e925121bb1e
--- /dev/null
+++ b/app/services/members/invite_member_builder.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Members
+ class InviteMemberBuilder < StandardMemberBuilder
+ def execute
+ if user_by_email
+ find_or_initialize_member_by_user(user_by_email.id)
+ else
+ source.members_and_requesters.find_or_initialize_by(invite_email: invitee) # rubocop:disable CodeReuse/ActiveRecord
+ end
+ end
+
+ private
+
+ def user_by_email
+ source.users_by_emails([invitee])[invitee]
+ end
+ end
+end
diff --git a/app/services/members/invite_service.rb b/app/services/members/invite_service.rb
index 1bf209ab79d..6d23a9bc2dc 100644
--- a/app/services/members/invite_service.rb
+++ b/app/services/members/invite_service.rb
@@ -31,8 +31,8 @@ module Members
return if params[:email].blank?
- # we need the below due to add_users hitting Members::CreatorService.parse_users_list and ignoring invalid emails
- # ideally we wouldn't need this, but we can't really change the add_users method
+ # we need the below due to add_member hitting Members::CreatorService.parse_users_list and ignoring invalid emails
+ # ideally we wouldn't need this, but we can't really change the add_members method
invalid_emails.each { |email| errors[email] = s_('AddMember|Invite email is invalid') }
end
diff --git a/app/services/members/projects/creator_service.rb b/app/services/members/projects/creator_service.rb
index cde1d0462e8..f45132749f9 100644
--- a/app/services/members/projects/creator_service.rb
+++ b/app/services/members/projects/creator_service.rb
@@ -32,7 +32,7 @@ module Members
end
def adding_the_creator_as_owner_in_a_personal_project?
- # this condition is reached during testing setup a lot due to use of `.add_user`
+ # this condition is reached during testing setup a lot due to use of `.add_member`
member.project.personal_namespace_holder?(member.user)
end
diff --git a/app/services/members/standard_member_builder.rb b/app/services/members/standard_member_builder.rb
new file mode 100644
index 00000000000..24e71f80d7e
--- /dev/null
+++ b/app/services/members/standard_member_builder.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Members
+ class StandardMemberBuilder
+ def initialize(source, invitee, existing_members)
+ @source = source
+ @invitee = invitee
+ @existing_members = existing_members
+ end
+
+ def execute
+ find_or_initialize_member_by_user(invitee.id)
+ end
+
+ private
+
+ attr_reader :source, :invitee, :existing_members
+
+ def find_or_initialize_member_by_user(user_id)
+ existing_members[user_id] || source.members_and_requesters.build(user_id: user_id) # rubocop:disable CodeReuse/ActiveRecord
+ end
+ end
+end
diff --git a/app/services/merge_requests/approval_service.rb b/app/services/merge_requests/approval_service.rb
index e3f0758699b..b8d817a15f3 100644
--- a/app/services/merge_requests/approval_service.rb
+++ b/app/services/merge_requests/approval_service.rb
@@ -16,7 +16,7 @@ module MergeRequests
mark_pending_todos_as_done(merge_request)
execute_approval_hooks(merge_request, current_user)
remove_attention_requested(merge_request)
- merge_request_activity_counter.track_approve_mr_action(user: current_user)
+ merge_request_activity_counter.track_approve_mr_action(user: current_user, merge_request: merge_request)
success
end
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index 2b6a66b9dee..9bd38478796 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -128,13 +128,8 @@ module MergeRequests
if draft_event = params.delete(:wip_event)
# We update the title that is provided in the params or we use the mr title
title = params[:title] || merge_request.title
- # Supports both `wip` and `draft` permutations of draft_event
- # This support can be removed >= %15.2
- #
params[:title] = case draft_event
- when 'wip' then MergeRequest.draft_title(title)
when 'draft' then MergeRequest.draft_title(title)
- when 'unwip' then MergeRequest.draftless_title(title)
when 'ready' then MergeRequest.draftless_title(title)
end
end
@@ -190,8 +185,11 @@ module MergeRequests
def create_pipeline_for(merge_request, user, async: false)
if async
- # TODO: pass push_options to worker
- MergeRequests::CreatePipelineWorker.perform_async(project.id, user.id, merge_request.id)
+ MergeRequests::CreatePipelineWorker.perform_async(
+ project.id,
+ user.id,
+ merge_request.id,
+ params.slice(:push_options).deep_stringify_keys)
else
MergeRequests::CreatePipelineService
.new(project: project, current_user: user, params: params.slice(:push_options))
diff --git a/app/services/merge_requests/create_pipeline_service.rb b/app/services/merge_requests/create_pipeline_service.rb
index 37c734613e7..c6a91a3b61e 100644
--- a/app/services/merge_requests/create_pipeline_service.rb
+++ b/app/services/merge_requests/create_pipeline_service.rb
@@ -50,12 +50,8 @@ module MergeRequests
end
def can_create_pipeline_in_target_project?(merge_request)
- if ::Feature.enabled?(:ci_disallow_to_create_merge_request_pipelines_in_target_project, merge_request.target_project)
- merge_request.for_same_project?
- else
- can?(current_user, :create_pipeline, merge_request.target_project) &&
- can_update_source_branch_in_target_project?(merge_request)
- end
+ can?(current_user, :create_pipeline, merge_request.target_project) &&
+ can_update_source_branch_in_target_project?(merge_request)
end
def can_update_source_branch_in_target_project?(merge_request)
diff --git a/app/services/namespaces/in_product_marketing_emails_service.rb b/app/services/namespaces/in_product_marketing_emails_service.rb
index 414f253deb8..c139b2e11dd 100644
--- a/app/services/namespaces/in_product_marketing_emails_service.rb
+++ b/app/services/namespaces/in_product_marketing_emails_service.rb
@@ -37,11 +37,6 @@ module Namespaces
interval_days: [1, 5, 10],
completed_actions: [:git_write, :pipeline_created, :trial_started],
incomplete_actions: [:user_added]
- },
- experience: {
- interval_days: [30],
- completed_actions: [:created, :git_write],
- incomplete_actions: []
}
}.freeze
diff --git a/app/services/notification_recipients/builder/base.rb b/app/services/notification_recipients/builder/base.rb
index 4f1bb0dc877..0a7f25f1af3 100644
--- a/app/services/notification_recipients/builder/base.rb
+++ b/app/services/notification_recipients/builder/base.rb
@@ -47,6 +47,8 @@ module NotificationRecipients
end
users = Array(users).compact
+ preload_users_namespace_bans(users)
+
recipients.concat(users.map { |u| make_recipient(u, type, reason) })
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -240,6 +242,14 @@ module NotificationRecipients
add_recipients(label.subscribers(project), :subscription, NotificationReason::SUBSCRIBED)
end
end
+
+ private
+
+ def preload_users_namespace_bans(_users)
+ # overridden in EE
+ end
end
end
end
+
+NotificationRecipients::Builder::Base.prepend_mod_with('NotificationRecipients::Builder::Base')
diff --git a/app/services/packages/cleanup/execute_policy_service.rb b/app/services/packages/cleanup/execute_policy_service.rb
new file mode 100644
index 00000000000..b432f6d0acb
--- /dev/null
+++ b/app/services/packages/cleanup/execute_policy_service.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+module Packages
+ module Cleanup
+ class ExecutePolicyService
+ include Gitlab::Utils::StrongMemoize
+
+ MAX_EXECUTION_TIME = 250.seconds
+
+ DUPLICATED_FILES_BATCH_SIZE = 10_000
+ MARK_PACKAGE_FILES_FOR_DESTRUCTION_SERVICE_BATCH_SIZE = 200
+
+ def initialize(policy)
+ @policy = policy
+ @counts = {
+ marked_package_files_total_count: 0,
+ unique_package_id_and_file_name_total_count: 0
+ }
+ end
+
+ def execute
+ cleanup_duplicated_files
+ end
+
+ private
+
+ def cleanup_duplicated_files
+ return if @policy.keep_n_duplicated_package_files_disabled?
+
+ result = installable_package_files.each_batch(of: DUPLICATED_FILES_BATCH_SIZE) do |package_files|
+ break :timeout if cleanup_duplicated_files_on(package_files) == :timeout
+ end
+
+ response_success(timeout: result == :timeout)
+ end
+
+ def cleanup_duplicated_files_on(package_files)
+ unique_package_id_and_file_name_from(package_files).each do |package_id, file_name|
+ result = remove_duplicated_files_for(package_id: package_id, file_name: file_name)
+ @counts[:marked_package_files_total_count] += result.payload[:marked_package_files_count]
+ @counts[:unique_package_id_and_file_name_total_count] += 1
+
+ break :timeout unless result.success?
+ end
+ end
+
+ def unique_package_id_and_file_name_from(package_files)
+ # This is a highly custom query for this service, that's why it's not in the model.
+ # rubocop: disable CodeReuse/ActiveRecord
+ package_files.group(:package_id, :file_name)
+ .having("COUNT(*) > #{@policy.keep_n_duplicated_package_files}")
+ .pluck(:package_id, :file_name)
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+
+ def remove_duplicated_files_for(package_id:, file_name:)
+ base = ::Packages::PackageFile.for_package_ids(package_id)
+ .installable
+ .with_file_name(file_name)
+ ids_to_keep = base.recent
+ .limit(@policy.keep_n_duplicated_package_files)
+ .pluck_primary_key
+
+ duplicated_package_files = base.id_not_in(ids_to_keep)
+ ::Packages::MarkPackageFilesForDestructionService.new(duplicated_package_files)
+ .execute(batch_deadline: batch_deadline, batch_size: MARK_PACKAGE_FILES_FOR_DESTRUCTION_SERVICE_BATCH_SIZE)
+ end
+
+ def project
+ @policy.project
+ end
+
+ def installable_package_files
+ ::Packages::PackageFile.installable
+ .for_package_ids(
+ ::Packages::Package.installable
+ .for_projects(project.id)
+ )
+ end
+
+ def batch_deadline
+ strong_memoize(:batch_deadline) do
+ MAX_EXECUTION_TIME.from_now
+ end
+ end
+
+ def response_success(timeout:)
+ ServiceResponse.success(
+ message: "Packages cleanup policy executed for project #{project.id}",
+ payload: {
+ timeout: timeout,
+ counts: @counts
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/packages/debian/create_package_file_service.rb b/app/services/packages/debian/create_package_file_service.rb
index 2022a63a725..fbbc8159ca0 100644
--- a/app/services/packages/debian/create_package_file_service.rb
+++ b/app/services/packages/debian/create_package_file_service.rb
@@ -3,12 +3,15 @@
module Packages
module Debian
class CreatePackageFileService
+ include ::Packages::FIPS
+
def initialize(package, params)
@package = package
@params = params
end
def execute
+ raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
raise ArgumentError, "Invalid package" unless package.present?
# Debian package file are first uploaded to incoming with empty metadata,
diff --git a/app/services/packages/debian/extract_changes_metadata_service.rb b/app/services/packages/debian/extract_changes_metadata_service.rb
index 43a4db5bdfc..30480834748 100644
--- a/app/services/packages/debian/extract_changes_metadata_service.rb
+++ b/app/services/packages/debian/extract_changes_metadata_service.rb
@@ -4,6 +4,7 @@ module Packages
module Debian
class ExtractChangesMetadataService
include Gitlab::Utils::StrongMemoize
+ include ::Packages::FIPS
ExtractionError = Class.new(StandardError)
@@ -13,6 +14,8 @@ module Packages
end
def execute
+ raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
+
{
file_type: file_type,
architecture: metadata[:architecture],
diff --git a/app/services/packages/debian/generate_distribution_service.rb b/app/services/packages/debian/generate_distribution_service.rb
index 33bf877a153..7db27f9234d 100644
--- a/app/services/packages/debian/generate_distribution_service.rb
+++ b/app/services/packages/debian/generate_distribution_service.rb
@@ -4,6 +4,7 @@ module Packages
module Debian
class GenerateDistributionService
include Gitlab::Utils::StrongMemoize
+ include ::Packages::FIPS
include ExclusiveLeaseGuard
ONE_HOUR = 1.hour.freeze
@@ -70,6 +71,8 @@ module Packages
end
def execute
+ raise DisabledError, 'Debian registry is not FIPS compliant' if Gitlab::FIPS.enabled?
+
try_obtain_lease do
@distribution.transaction do
# We consider `apt-get update` can take at most one hour
diff --git a/app/services/packages/mark_package_files_for_destruction_service.rb b/app/services/packages/mark_package_files_for_destruction_service.rb
index 3672b44b409..e7fdd88843a 100644
--- a/app/services/packages/mark_package_files_for_destruction_service.rb
+++ b/app/services/packages/mark_package_files_for_destruction_service.rb
@@ -9,18 +9,41 @@ module Packages
@package_files = package_files
end
- def execute
- @package_files.each_batch(of: BATCH_SIZE) do |batched_package_files|
- batched_package_files.update_all(status: :pending_destruction)
+ def execute(batch_deadline: nil, batch_size: BATCH_SIZE)
+ timeout = false
+ updates_count = 0
+ min_batch_size = [batch_size, BATCH_SIZE].min
+
+ @package_files.each_batch(of: min_batch_size) do |batched_package_files|
+ if batch_deadline && Time.zone.now > batch_deadline
+ timeout = true
+ break
+ end
+
+ updates_count += batched_package_files.update_all(status: :pending_destruction)
end
- service_response_success('Package files are now pending destruction')
+ payload = { marked_package_files_count: updates_count }
+
+ return response_error(payload) if timeout
+
+ response_success(payload)
end
private
- def service_response_success(message)
- ServiceResponse.success(message: message)
+ def response_success(payload)
+ ServiceResponse.success(
+ message: 'Package files are now pending destruction',
+ payload: payload
+ )
+ end
+
+ def response_error(payload)
+ ServiceResponse.error(
+ message: 'Timeout while marking package files as pending destruction',
+ payload: payload
+ )
end
end
end
diff --git a/app/services/packages/pypi/create_package_service.rb b/app/services/packages/pypi/create_package_service.rb
index 5d7e967ceb0..b464ce4504a 100644
--- a/app/services/packages/pypi/create_package_service.rb
+++ b/app/services/packages/pypi/create_package_service.rb
@@ -16,6 +16,8 @@ module Packages
raise ActiveRecord::RecordInvalid, meta
end
+ params.delete(:md5_digest) if Gitlab::FIPS.enabled?
+
Packages::Pypi::Metadatum.upsert(meta.attributes)
::Packages::CreatePackageFileService.new(created_package, file_params).execute
diff --git a/app/services/pages/delete_service.rb b/app/services/pages/delete_service.rb
index 95e99daeb6c..dcee4c5b665 100644
--- a/app/services/pages/delete_service.rb
+++ b/app/services/pages/delete_service.rb
@@ -21,7 +21,8 @@ module Pages
def publish_deleted_event
event = Pages::PageDeletedEvent.new(data: {
project_id: project.id,
- namespace_id: project.namespace_id
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
})
Gitlab::EventStore.publish(event)
diff --git a/app/services/pod_logs/base_service.rb b/app/services/pod_logs/base_service.rb
deleted file mode 100644
index e4b6ad31e33..00000000000
--- a/app/services/pod_logs/base_service.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-# frozen_string_literal: true
-
-module PodLogs
- class BaseService < ::BaseService
- include ReactiveCaching
- include Stepable
-
- attr_reader :cluster, :namespace, :params
-
- CACHE_KEY_GET_POD_LOG = 'get_pod_log'
- K8S_NAME_MAX_LENGTH = 253
-
- self.reactive_cache_work_type = :external_dependency
-
- def id
- cluster.id
- end
-
- def initialize(cluster, namespace, params: {})
- @cluster = cluster
- @namespace = namespace
- @params = filter_params(params.dup.stringify_keys).to_hash
- end
-
- def execute
- with_reactive_cache(
- CACHE_KEY_GET_POD_LOG,
- namespace,
- params
- ) do |result|
- result
- end
- end
-
- def calculate_reactive_cache(request, _namespace, _params)
- case request
- when CACHE_KEY_GET_POD_LOG
- execute_steps
- else
- exception = StandardError.new('Unknown reactive cache request')
- Gitlab::ErrorTracking.track_and_raise_for_dev_exception(exception, request: request)
- error(_('Unknown cache key'))
- end
- end
-
- private
-
- def valid_params
- %w(pod_name container_name)
- end
-
- def success_return_keys
- %i(status logs pod_name container_name pods)
- end
-
- def check_arguments(result)
- return error(_('Cluster does not exist')) if cluster.nil?
- return error(_('Namespace is empty')) if namespace.blank?
-
- result[:pod_name] = params['pod_name'].presence
- result[:container_name] = params['container_name'].presence
-
- return error(_('Invalid pod_name')) if result[:pod_name] && !result[:pod_name].is_a?(String)
- return error(_('Invalid container_name')) if result[:container_name] && !result[:container_name].is_a?(String)
-
- success(result)
- end
-
- def get_raw_pods(result)
- raise NotImplementedError
- end
-
- def get_pod_names(result)
- result[:pods] = result[:raw_pods].map { |p| p[:name] }
-
- success(result)
- end
-
- def pod_logs(result)
- raise NotImplementedError
- end
-
- def filter_return_keys(result)
- result.slice(*success_return_keys)
- end
-
- def filter_params(params)
- params.slice(*valid_params)
- end
- end
-end
diff --git a/app/services/pod_logs/elasticsearch_service.rb b/app/services/pod_logs/elasticsearch_service.rb
deleted file mode 100644
index 28ccace62e5..00000000000
--- a/app/services/pod_logs/elasticsearch_service.rb
+++ /dev/null
@@ -1,98 +0,0 @@
-# frozen_string_literal: true
-
-module PodLogs
- class ElasticsearchService < PodLogs::BaseService
- steps :check_arguments,
- :get_raw_pods,
- :get_pod_names,
- :check_times,
- :check_search,
- :check_cursor,
- :pod_logs,
- :filter_return_keys
-
- self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
-
- private
-
- def valid_params
- super + %w(search start_time end_time cursor)
- end
-
- def success_return_keys
- super + %i(cursor)
- end
-
- def get_raw_pods(result)
- client = cluster&.elasticsearch_client
- return error(_('Unable to connect to Elasticsearch')) unless client
-
- result[:raw_pods] = ::Gitlab::Elasticsearch::Logs::Pods.new(client).pods(namespace)
-
- success(result)
- rescue Elasticsearch::Transport::Transport::ServerError => e
- ::Gitlab::ErrorTracking.track_exception(e)
-
- error(_('Elasticsearch returned status code: %{status_code}') % {
- # ServerError is the parent class of exceptions named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound"
- # there is no method on the exception other than the class name to determine the type of error encountered.
- status_code: e.class.name.split('::').last
- })
- end
-
- def check_times(result)
- result[:start_time] = params['start_time'] if params.key?('start_time') && Time.iso8601(params['start_time'])
- result[:end_time] = params['end_time'] if params.key?('end_time') && Time.iso8601(params['end_time'])
-
- success(result)
- rescue ArgumentError
- error(_('Invalid start or end time format'))
- end
-
- def check_search(result)
- result[:search] = params['search'] if params.key?('search')
-
- return error(_('Invalid search parameter')) if result[:search] && !result[:search].is_a?(String)
-
- success(result)
- end
-
- def check_cursor(result)
- result[:cursor] = params['cursor'] if params.key?('cursor')
-
- return error(_('Invalid cursor parameter')) if result[:cursor] && !result[:cursor].is_a?(String)
-
- success(result)
- end
-
- def pod_logs(result)
- client = cluster&.elasticsearch_client
- return error(_('Unable to connect to Elasticsearch')) unless client
-
- response = ::Gitlab::Elasticsearch::Logs::Lines.new(client).pod_logs(
- namespace,
- pod_name: result[:pod_name],
- container_name: result[:container_name],
- search: result[:search],
- start_time: result[:start_time],
- end_time: result[:end_time],
- cursor: result[:cursor],
- chart_above_v2: cluster.elastic_stack_adapter.chart_above_v2?
- )
-
- result.merge!(response)
-
- success(result)
- rescue Elasticsearch::Transport::Transport::ServerError => e
- ::Gitlab::ErrorTracking.track_exception(e)
-
- error(_('Elasticsearch returned status code: %{status_code}') % {
- # ServerError is the parent class of exceptions named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound"
- # there is no method on the exception other than the class name to determine the type of error encountered.
- status_code: e.class.name.split('::').last
- })
- rescue ::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor
- error(_('Invalid cursor value provided'))
- end
- end
-end
diff --git a/app/services/pod_logs/kubernetes_service.rb b/app/services/pod_logs/kubernetes_service.rb
deleted file mode 100644
index 28b1a179635..00000000000
--- a/app/services/pod_logs/kubernetes_service.rb
+++ /dev/null
@@ -1,151 +0,0 @@
-# frozen_string_literal: true
-
-module PodLogs
- class KubernetesService < PodLogs::BaseService
- LOGS_LIMIT = 500
- REPLACEMENT_CHAR = "\u{FFFD}"
-
- EncodingHelperError = Class.new(StandardError)
-
- steps :check_arguments,
- :get_raw_pods,
- :get_pod_names,
- :check_pod_name,
- :check_container_name,
- :pod_logs,
- :encode_logs_to_utf8,
- :split_logs,
- :filter_return_keys
-
- self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
-
- private
-
- def get_raw_pods(result)
- result[:raw_pods] = cluster.kubeclient.get_pods(namespace: namespace).map do |pod|
- {
- name: pod.metadata.name,
- container_names: pod.spec.containers.map(&:name)
- }
- end
-
- success(result)
- end
-
- def check_pod_name(result)
- # If pod_name is not received as parameter, get the pod logs of the first
- # pod of this namespace.
- result[:pod_name] ||= result[:pods].first
-
- unless result[:pod_name]
- return error(_('No pods available'))
- end
-
- unless result[:pod_name].length.to_i <= K8S_NAME_MAX_LENGTH
- return error(_('pod_name cannot be larger than %{max_length}'\
- ' chars' % { max_length: K8S_NAME_MAX_LENGTH }))
- end
-
- unless result[:pod_name] =~ Gitlab::Regex.kubernetes_dns_subdomain_regex
- return error(_('pod_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character'))
- end
-
- unless result[:pods].include?(result[:pod_name])
- return error(_('Pod does not exist'))
- end
-
- success(result)
- end
-
- def check_container_name(result)
- pod_details = result[:raw_pods].find { |p| p[:name] == result[:pod_name] }
- container_names = pod_details[:container_names]
-
- # select first container if not specified
- result[:container_name] ||= container_names.first
-
- unless result[:container_name]
- return error(_('No containers available'))
- end
-
- unless result[:container_name].length.to_i <= K8S_NAME_MAX_LENGTH
- return error(_('container_name cannot be larger than'\
- ' %{max_length} chars' % { max_length: K8S_NAME_MAX_LENGTH }))
- end
-
- unless result[:container_name] =~ Gitlab::Regex.kubernetes_dns_subdomain_regex
- return error(_('container_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character'))
- end
-
- unless container_names.include?(result[:container_name])
- return error(_('Container does not exist'))
- end
-
- success(result)
- end
-
- def pod_logs(result)
- result[:logs] = cluster.kubeclient.get_pod_log(
- result[:pod_name],
- namespace,
- container: result[:container_name],
- tail_lines: LOGS_LIMIT,
- timestamps: true
- ).body
-
- success(result)
- rescue Kubeclient::ResourceNotFoundError
- error(_('Pod not found'))
- rescue Kubeclient::HttpError => e
- ::Gitlab::ErrorTracking.track_exception(e)
-
- error(_('Kubernetes API returned status code: %{error_code}') % {
- error_code: e.error_code
- })
- end
-
- # Check https://gitlab.com/gitlab-org/gitlab/issues/34965#note_292261879
- # for more details on why this is necessary.
- def encode_logs_to_utf8(result)
- return success(result) if result[:logs].nil?
- return success(result) if result[:logs].encoding == Encoding::UTF_8
-
- result[:logs] = encode_utf8(result[:logs])
-
- success(result)
- rescue EncodingHelperError
- error(_('Unable to convert Kubernetes logs encoding to UTF-8'))
- end
-
- def split_logs(result)
- result[:logs] = result[:logs].strip.lines(chomp: true).map do |line|
- # message contains a RFC3339Nano timestamp, then a space, then the log line.
- # resolution of the nanoseconds can vary, so we split on the first space
- values = line.split(' ', 2)
- {
- timestamp: values[0],
- message: values[1],
- pod: result[:pod_name]
- }
- end
-
- success(result)
- end
-
- def encode_utf8(logs)
- utf8_logs = Gitlab::EncodingHelper.encode_utf8(logs.dup, replace: REPLACEMENT_CHAR)
-
- # Gitlab::EncodingHelper.encode_utf8 can return '' or nil if an exception
- # is raised while encoding. We prefer to return an error rather than wrongly
- # display blank logs.
- no_utf8_logs = logs.present? && utf8_logs.blank?
- unexpected_encoding = utf8_logs&.encoding != Encoding::UTF_8
-
- if no_utf8_logs || unexpected_encoding
- raise EncodingHelperError, 'Could not convert Kubernetes logs to UTF-8'
- end
-
- utf8_logs
- end
- end
-end
diff --git a/app/services/preview_markdown_service.rb b/app/services/preview_markdown_service.rb
index af9c338b59e..03844c2dc7e 100644
--- a/app/services/preview_markdown_service.rb
+++ b/app/services/preview_markdown_service.rb
@@ -10,7 +10,7 @@ class PreviewMarkdownService < BaseService
text: text,
users: users,
suggestions: suggestions,
- commands: commands.join(' ')
+ commands: commands.join('<br>')
)
end
diff --git a/app/services/projects/after_rename_service.rb b/app/services/projects/after_rename_service.rb
index 2ed4346e5ca..9dc957b5be2 100644
--- a/app/services/projects/after_rename_service.rb
+++ b/app/services/projects/after_rename_service.rb
@@ -46,6 +46,7 @@ module Projects
update_repository_configuration
rename_transferred_documents
log_completion
+ publish_event
end
def first_ensure_no_registry_tags_are_present
@@ -132,6 +133,18 @@ module Projects
raise RenameFailedError, error
end
+
+ def publish_event
+ event = Projects::ProjectPathChangedEvent.new(data: {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id,
+ old_path: full_path_before,
+ new_path: full_path_after
+ })
+
+ Gitlab::EventStore.publish(event)
+ end
end
end
diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb
index f7c1240a3ba..b324ea27360 100644
--- a/app/services/projects/blame_service.rb
+++ b/app/services/projects/blame_service.rb
@@ -12,6 +12,8 @@ module Projects
@page = extract_page(params)
end
+ attr_reader :page
+
def blame
Gitlab::Blame.new(blob, commit, range: blame_range)
end
@@ -19,15 +21,14 @@ module Projects
def pagination
return unless pagination_enabled?
- Kaminari.paginate_array([], total_count: blob_lines_count)
+ Kaminari.paginate_array([], total_count: blob_lines_count, limit: per_page)
+ .tap { |pagination| pagination.max_paginates_per(per_page) }
.page(page)
- .per(per_page)
- .limit(per_page)
end
private
- attr_reader :blob, :commit, :page
+ attr_reader :blob, :commit
def blame_range
return unless pagination_enabled?
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index c7f284bec9b..9bc8bb428fb 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -129,6 +129,8 @@ module Projects
create_readme if @initialize_with_readme
create_sast_commit if @initialize_with_sast
+
+ publish_event
end
def create_project_settings
@@ -294,6 +296,16 @@ module Projects
params[:topic_list] ||= topic_list if topic_list
end
+
+ def publish_event
+ event = Projects::ProjectCreatedEvent.new(data: {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
+ })
+
+ Gitlab::EventStore.publish(event)
+ end
end
end
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index bc5be5bdff3..06a44b07f9f 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -132,7 +132,7 @@ module Projects
destroy_web_hooks!
destroy_project_bots!
destroy_ci_records!
- destroy_mr_diff_commits!
+ destroy_mr_diff_relations!
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
@@ -153,23 +153,28 @@ module Projects
# cascading deletes may exceed statement timeouts, causing failures.
# (see https://gitlab.com/gitlab-org/gitlab/-/issues/346166)
#
+ # Removing merge_request_diff_files records may also cause timeouts, so they
+ # can be deleted in batches as well.
+ #
# rubocop: disable CodeReuse/ActiveRecord
- def destroy_mr_diff_commits!
+ def destroy_mr_diff_relations!
mr_batch_size = 100
delete_batch_size = 1000
project.merge_requests.each_batch(column: :iid, of: mr_batch_size) do |relation_ids|
- loop do
- inner_query = MergeRequestDiffCommit
- .select(:merge_request_diff_id, :relative_order)
- .where(merge_request_diff_id: MergeRequestDiff.where(merge_request_id: relation_ids).select(:id))
- .limit(delete_batch_size)
-
- deleted_rows = MergeRequestDiffCommit
- .where('(merge_request_diff_commits.merge_request_diff_id, merge_request_diff_commits.relative_order) IN (?)', inner_query)
- .delete_all
-
- break if deleted_rows == 0
+ [MergeRequestDiffCommit, MergeRequestDiffFile].each do |model|
+ loop do
+ inner_query = model
+ .select(:merge_request_diff_id, :relative_order)
+ .where(merge_request_diff_id: MergeRequestDiff.where(merge_request_id: relation_ids).select(:id))
+ .limit(delete_batch_size)
+
+ deleted_rows = model
+ .where("(#{model.table_name}.merge_request_diff_id, #{model.table_name}.relative_order) IN (?)", inner_query) # rubocop:disable GitlabSecurity/SqlInjection
+ .delete_all
+
+ break if deleted_rows == 0
+ end
end
end
end
@@ -212,7 +217,7 @@ module Projects
# produces smaller and faster queries to the database.
def destroy_web_hooks!
project.hooks.find_each do |web_hook|
- result = ::WebHooks::DestroyService.new(current_user).sync_destroy(web_hook)
+ result = ::WebHooks::DestroyService.new(current_user).execute(web_hook)
unless result[:status] == :success
raise_error(s_('DeleteProject|Failed to remove webhooks. Please try again or contact administrator.'))
@@ -263,8 +268,12 @@ module Projects
end
def publish_project_deleted_event_for(project)
- data = { project_id: project.id, namespace_id: project.namespace_id }
- event = Projects::ProjectDeletedEvent.new(data: data)
+ event = Projects::ProjectDeletedEvent.new(data: {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
+ })
+
Gitlab::EventStore.publish(event)
end
end
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index 3e8d6563709..70a04cd556a 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -5,7 +5,10 @@ module Projects
def execute(fork_to_project = nil)
forked_project = fork_to_project ? link_existing_project(fork_to_project) : fork_new_project
- refresh_forks_count if forked_project&.saved?
+ if forked_project&.saved?
+ refresh_forks_count
+ stream_audit_event(forked_project)
+ end
forked_project
end
@@ -62,7 +65,10 @@ module Projects
# exception.
relations_block: -> (project) { build_fork_network_member(project) },
skip_disk_validation: skip_disk_validation,
- external_authorization_classification_label: @project.external_authorization_classification_label
+ external_authorization_classification_label: @project.external_authorization_classification_label,
+ suggestion_commit_message: @project.suggestion_commit_message,
+ merge_commit_template: @project.merge_commit_template,
+ squash_commit_template: @project.squash_commit_template
}
if @project.avatar.present? && @project.avatar.image?
@@ -133,5 +139,11 @@ module Projects
def target_mr_default_target_self
@target_mr_default_target_self ||= params[:mr_default_target_self]
end
+
+ def stream_audit_event(forked_project)
+ # Defined in EE
+ end
end
end
+
+Projects::ForkService.prepend_mod
diff --git a/app/services/projects/group_links/update_service.rb b/app/services/projects/group_links/update_service.rb
index a836b96cac3..c271b0a2307 100644
--- a/app/services/projects/group_links/update_service.rb
+++ b/app/services/projects/group_links/update_service.rb
@@ -37,3 +37,5 @@ module Projects
end
end
end
+
+Projects::GroupLinks::UpdateService.prepend_mod
diff --git a/app/services/projects/move_deploy_keys_projects_service.rb b/app/services/projects/move_deploy_keys_projects_service.rb
index 98ba5eb3f13..a45b78db383 100644
--- a/app/services/projects/move_deploy_keys_projects_service.rb
+++ b/app/services/projects/move_deploy_keys_projects_service.rb
@@ -5,6 +5,10 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super
+ # The SHA256 fingerprint should be there, but just in case it isn't
+ # we want to make sure it's generated. Otherwise we might delete keys.
+ ensure_sha256_fingerprints
+
Project.transaction do
move_deploy_keys_projects
remove_remaining_deploy_keys_projects if remove_remaining_elements
@@ -15,6 +19,11 @@ module Projects
private
+ def ensure_sha256_fingerprints
+ @project.deploy_keys.each(&:ensure_sha256_fingerprint!)
+ source_project.deploy_keys.each(&:ensure_sha256_fingerprint!)
+ end
+
def move_deploy_keys_projects
non_existent_deploy_keys_projects.update_all(project_id: @project.id)
end
@@ -23,7 +32,7 @@ module Projects
def non_existent_deploy_keys_projects
source_project.deploy_keys_projects
.joins(:deploy_key)
- .where.not(keys: { fingerprint: @project.deploy_keys.select(:fingerprint) })
+ .where.not(keys: { fingerprint_sha256: @project.deploy_keys.select(:fingerprint_sha256) })
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index 7e4e0d7378e..b2166dc84c7 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -18,7 +18,6 @@ module Projects
.merge(grafana_integration_params)
.merge(prometheus_integration_params)
.merge(incident_management_setting_params)
- .merge(tracing_setting_params)
end
def alerting_setting_params
@@ -132,15 +131,6 @@ module Projects
{ incident_management_setting_attributes: attrs }
end
-
- def tracing_setting_params
- attr = params[:tracing_setting_attributes]
- return {} unless attr
-
- destroy = attr[:external_url].blank?
-
- { tracing_setting_attributes: attr.merge(_destroy: destroy) }
- end
end
end
end
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 8ded2516b97..dd1c2b94e18 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -2,21 +2,17 @@
module Projects
class UpdatePagesService < BaseService
- InvalidStateError = Class.new(StandardError)
- WrongUploadedDeploymentSizeError = Class.new(StandardError)
- BLOCK_SIZE = 32.kilobytes
- PUBLIC_DIR = 'public'
-
# old deployment can be cached by pages daemon
# so we need to give pages daemon some time update cache
# 10 minutes is enough, but 30 feels safer
OLD_DEPLOYMENTS_DESTRUCTION_DELAY = 30.minutes.freeze
- attr_reader :build
+ attr_reader :build, :deployment_update
def initialize(project, build)
@project = project
@build = build
+ @deployment_update = ::Gitlab::Pages::DeploymentUpdate.new(project, build)
end
def execute
@@ -29,20 +25,20 @@ module Projects
job.run!
end
- validate_state!
- validate_max_size!
- validate_public_folder!
- validate_max_entries!
+ return error(deployment_update.errors.first.full_message) unless deployment_update.valid?
build.artifacts_file.use_file do |artifacts_path|
- create_pages_deployment(artifacts_path, build)
- success
+ deployment = create_pages_deployment(artifacts_path, build)
+
+ break error('The uploaded artifact size does not match the expected value') unless deployment
+
+ if deployment_update.valid?
+ update_project_pages_deployment(deployment)
+ success
+ else
+ error(deployment_update.errors.first.full_message)
+ end
end
- rescue InvalidStateError => e
- error(e.message)
- rescue WrongUploadedDeploymentSizeError => e
- error("Uploading artifacts to pages storage failed")
- raise e
rescue StandardError => e
error(e.message)
raise e
@@ -53,13 +49,14 @@ module Projects
def success
@commit_status.success
@project.mark_pages_as_deployed
+ publish_deployed_event
super
end
def error(message)
register_failure
log_error("Projects::UpdatePagesService: #{message}")
- @commit_status.allow_failure = !latest?
+ @commit_status.allow_failure = !deployment_update.latest?
@commit_status.description = message
@commit_status.drop(:script_failure)
super
@@ -75,24 +72,22 @@ module Projects
def create_pages_deployment(artifacts_path, build)
sha256 = build.job_artifacts_archive.file_sha256
-
- deployment = nil
File.open(artifacts_path) do |file|
- deployment = project.pages_deployments.create!(file: file,
- file_count: entries_count,
- file_sha256: sha256,
- ci_build_id: build.id
- )
-
- if deployment.size != file.size || deployment.file.size != file.size
- raise(WrongUploadedDeploymentSizeError)
- end
+ deployment = project.pages_deployments.create!(
+ file: file,
+ file_count: deployment_update.entries_count,
+ file_sha256: sha256,
+ ci_build_id: build.id
+ )
- validate_outdated_sha!
+ break if deployment.size != file.size || deployment.file.size != file.size
- project.update_pages_deployment!(deployment)
+ deployment
end
+ end
+ def update_project_pages_deployment(deployment)
+ project.update_pages_deployment!(deployment)
DestroyPagesDeploymentsWorker.perform_in(
OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
project.id,
@@ -108,17 +103,6 @@ module Projects
build.artifacts_file.path
end
- def latest_sha
- project.commit(build.ref).try(:sha).to_s
- ensure
- # Close any file descriptors that were opened and free libgit2 buffers
- project.cleanup
- end
-
- def sha
- build.sha
- end
-
def register_attempt
pages_deployments_total_counter.increment
end
@@ -135,75 +119,14 @@ module Projects
@pages_deployments_failed_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_failed_total, "Counter of GitLab Pages deployments which failed")
end
- def validate_state!
- raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
- raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
-
- validate_outdated_sha!
- end
-
- def validate_outdated_sha!
- return if latest?
+ def publish_deployed_event
+ event = ::Pages::PageDeployedEvent.new(data: {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
+ })
- # use pipeline_id in case the build is retried
- last_deployed_pipeline_id = project.pages_metadatum&.pages_deployment&.ci_build&.pipeline_id
-
- return unless last_deployed_pipeline_id
- return if last_deployed_pipeline_id <= build.pipeline_id
-
- raise InvalidStateError, 'build SHA is outdated for this ref'
- end
-
- def latest?
- # check if sha for the ref is still the most recent one
- # this helps in case when multiple deployments happens
- sha == latest_sha
- end
-
- def validate_max_size!
- if total_size > max_size
- raise InvalidStateError, "artifacts for pages are too large: #{total_size}"
- end
- end
-
- # Calculate page size after extract
- def total_size
- @total_size ||= build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true).total_size
- end
-
- def max_size_from_settings
- Gitlab::CurrentSettings.max_pages_size.megabytes
- end
-
- def max_size
- max_pages_size = max_size_from_settings
-
- return ::Gitlab::Pages::MAX_SIZE if max_pages_size == 0
-
- max_pages_size
- end
-
- def validate_max_entries!
- if pages_file_entries_limit > 0 && entries_count > pages_file_entries_limit
- raise InvalidStateError, "pages site contains #{entries_count} file entries, while limit is set to #{pages_file_entries_limit}"
- end
- end
-
- def validate_public_folder!
- raise InvalidStateError, 'Error: The `public/` folder is missing, or not declared in `.gitlab-ci.yml`.' unless total_size > 0
- end
-
- def entries_count
- # we're using the full archive and pages daemon needs to read it
- # so we want the total count from entries, not only "public/" directory
- # because it better approximates work we need to do before we can serve the site
- @entries_count = build.artifacts_metadata_entry("", recursive: true).entries.count
- end
-
- def pages_file_entries_limit
- project.actual_limits.pages_file_entries
+ Gitlab::EventStore.publish(event)
end
end
end
-
-Projects::UpdatePagesService.prepend_mod_with('Projects::UpdatePagesService')
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index fb810af3e6b..5708421014a 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -10,6 +10,7 @@ module Projects
def execute
build_topics
remove_unallowed_params
+ mirror_operations_access_level_changes
validate!
ensure_wiki_exists if enabling_wiki?
@@ -82,6 +83,21 @@ module Projects
params.delete(:emails_disabled) unless can?(current_user, :set_emails_disabled, project)
end
+ # Temporary code to sync permissions changes as operations access setting
+ # is being split into monitor_access_level, deployments_access_level, infrastructure_access_level.
+ # To be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/364240
+ def mirror_operations_access_level_changes
+ return if Feature.enabled?(:split_operations_visibility_permissions, project)
+
+ operations_access_level = params.dig(:project_feature_attributes, :operations_access_level)
+
+ return if operations_access_level.nil?
+
+ [:monitor_access_level, :infrastructure_access_level, :feature_flags_access_level, :environments_access_level].each do |key|
+ params[:project_feature_attributes][key] = operations_access_level
+ end
+ end
+
def after_update
todos_features_changes = %w(
issues_access_level
diff --git a/app/services/protected_branches/api_service.rb b/app/services/protected_branches/api_service.rb
index d0d0737fd66..f604a57bcd1 100644
--- a/app/services/protected_branches/api_service.rb
+++ b/app/services/protected_branches/api_service.rb
@@ -10,8 +10,8 @@ module ProtectedBranches
{
name: params[:name],
allow_force_push: allow_force_push?,
- push_access_levels_attributes: AccessLevelParams.new(:push, params).access_levels,
- merge_access_levels_attributes: AccessLevelParams.new(:merge, params).access_levels
+ push_access_levels_attributes: ::ProtectedRefs::AccessLevelParams.new(:push, params).access_levels,
+ merge_access_levels_attributes: ::ProtectedRefs::AccessLevelParams.new(:merge, params).access_levels
}
end
diff --git a/app/services/protected_branches/access_level_params.rb b/app/services/protected_refs/access_level_params.rb
index 6f7a289d9b4..59fc17868d1 100644
--- a/app/services/protected_branches/access_level_params.rb
+++ b/app/services/protected_refs/access_level_params.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-module ProtectedBranches
+module ProtectedRefs
class AccessLevelParams
attr_reader :type, :params
@@ -34,4 +34,4 @@ module ProtectedBranches
end
end
-ProtectedBranches::AccessLevelParams.prepend_mod_with('ProtectedBranches::AccessLevelParams')
+ProtectedRefs::AccessLevelParams.prepend_mod_with('ProtectedRefs::AccessLevelParams')
diff --git a/app/services/quick_actions/interpret_service.rb b/app/services/quick_actions/interpret_service.rb
index 4bcb15b2d9c..1d7c5d2c80a 100644
--- a/app/services/quick_actions/interpret_service.rb
+++ b/app/services/quick_actions/interpret_service.rb
@@ -69,28 +69,32 @@ module QuickActions
Gitlab::QuickActions::Extractor.new(self.class.command_definitions)
end
+ # Find users for commands like /assign
+ #
+ # eg. /assign me and @jane and jack
def extract_users(params)
- return [] if params.blank?
-
- # We are using the a simple User.by_username query here rather than a ReferenceExtractor
- # because the needs here are much simpler: we only deal in usernames, and
- # want to also handle bare usernames. The ReferenceExtractor also has
- # different behaviour, and will return all group members for groups named
- # using a user-style reference, which is not in scope here.
- #
- # nb: underscores may be passed in escaped to protect them from markdown rendering
- args = params.split(/\s|,/).select(&:present?).uniq - ['and']
- args.map! { _1.gsub(/\\_/, '_') }
- usernames = (args - ['me']).map { _1.delete_prefix('@') }
- found = User.by_username(usernames).to_a.select { can?(:read_user, _1) }
- found_names = found.map(&:username).map(&:downcase).to_set
- missing = args.reject do |arg|
- arg == 'me' || found_names.include?(arg.downcase.delete_prefix('@'))
- end.map { "'#{_1}'" }
-
- failed_parse(format(_("Failed to find users for %{missing}"), missing: missing.to_sentence)) if missing.present?
-
- found + [current_user].select { args.include?('me') }
+ Gitlab::QuickActions::UsersExtractor
+ .new(current_user, project: project, group: group, target: quick_action_target, text: params)
+ .execute
+
+ rescue Gitlab::QuickActions::UsersExtractor::Error => err
+ extract_users_failed(err)
+ end
+
+ def extract_users_failed(err)
+ case err
+ when Gitlab::QuickActions::UsersExtractor::MissingError
+ failed_parse(format(_("Failed to find users for %{missing}"), missing: err.message))
+ when Gitlab::QuickActions::UsersExtractor::TooManyRefsError
+ failed_parse(format(_('Too many references. Quick actions are limited to at most %{max_count} user references'),
+ max_count: err.limit))
+ when Gitlab::QuickActions::UsersExtractor::TooManyFoundError
+ failed_parse(format(_("Too many users found. Quick actions are limited to at most %{max_count} users"),
+ max_count: err.limit))
+ else
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(err)
+ failed_parse(_('Something went wrong'))
+ end
end
def find_milestones(project, params = {})
diff --git a/app/services/repositories/changelog_service.rb b/app/services/repositories/changelog_service.rb
index 7a78b323453..447d4d979a6 100644
--- a/app/services/repositories/changelog_service.rb
+++ b/app/services/repositories/changelog_service.rb
@@ -41,6 +41,9 @@ module Repositories
# The `trailer` argument is the Git trailer to use for determining what
# commits to include in the changelog.
#
+ # The `config_file` argument specifies the path to the configuration file as
+ # stored in the project's Git repository.
+ #
# The `file` argument specifies the name/path of the file to commit the
# changes to. If the file doesn't exist, it's created automatically.
#
@@ -57,6 +60,7 @@ module Repositories
to: branch,
date: DateTime.now,
trailer: DEFAULT_TRAILER,
+ config_file: Gitlab::Changelog::Config::DEFAULT_FILE_PATH,
file: DEFAULT_FILE,
message: "Add changelog for version #{version}"
)
@@ -68,13 +72,14 @@ module Repositories
@date = date
@branch = branch
@trailer = trailer
+ @config_file = config_file
@file = file
@message = message
end
# rubocop: enable Metrics/ParameterLists
def execute(commit_to_changelog: true)
- config = Gitlab::Changelog::Config.from_git(@project, @user)
+ config = Gitlab::Changelog::Config.from_git(@project, @user, @config_file)
from = start_of_commit_range(config)
# For every entry we want to only include the merge request that
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index 316e6367aa7..eed03ba22fe 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -108,7 +108,7 @@ module ResourceAccessTokens
end
def create_membership(resource, user, access_level)
- resource.add_user(user, access_level, expires_at: params[:expires_at])
+ resource.add_member(user, access_level, expires_at: params[:expires_at])
end
def log_event(token)
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index 28e487aa24d..cea7fc5769e 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -11,7 +11,7 @@ class SearchService
def initialize(current_user, params = {})
@current_user = current_user
- @params = Gitlab::Search::Params.new(params, detect_abuse: prevent_abusive_searches?)
+ @params = Gitlab::Search::Params.new(params, detect_abuse: true)
end
# rubocop: disable CodeReuse/ActiveRecord
@@ -91,12 +91,19 @@ class SearchService
end
end
- private
-
- def prevent_abusive_searches?
- Feature.enabled?(:prevent_abusive_searches, current_user)
+ def level
+ @level ||=
+ if project
+ 'project'
+ elsif group
+ 'group'
+ else
+ 'global'
+ end
end
+ private
+
def page
[1, params[:page].to_i].max
end
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index b850592f7ba..89cb14e6fff 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -52,15 +52,22 @@ module ServicePing
ServicePing::DevopsReport.new(response).execute
end
- return unless Feature.enabled?(:measure_service_ping_metric_collection)
-
- submit_payload({ metadata: { metrics: metrics_collection_time(usage_data) } }, path: METADATA_PATH)
+ submit_payload(metadata(usage_data), path: METADATA_PATH)
end
private
attr_reader :payload, :skip_db_write
+ def metadata(service_ping_payload)
+ {
+ metadata: {
+ uuid: service_ping_payload[:uuid],
+ metrics: metrics_collection_time(service_ping_payload)
+ }
+ }
+ end
+
def metrics_collection_time(payload, parents = [])
return [] unless payload.is_a?(Hash)
diff --git a/app/services/system_notes/incidents_service.rb b/app/services/system_notes/incidents_service.rb
index d5da684a2d8..137994baa74 100644
--- a/app/services/system_notes/incidents_service.rb
+++ b/app/services/system_notes/incidents_service.rb
@@ -15,18 +15,14 @@ module SystemNotes
def add_timeline_event(timeline_event)
author = timeline_event.author
- anchor = "timeline_event_#{timeline_event.id}"
- path = url_helpers.project_issues_incident_path(project, noteable, anchor: anchor)
- body = "added an [incident timeline event](#{path})"
+ body = 'added an incident timeline event'
create_note(NoteSummary.new(noteable, project, author, body, action: 'timeline_event'))
end
def edit_timeline_event(timeline_event, author, was_changed:)
- anchor = "timeline_event_#{timeline_event.id}"
- path = url_helpers.project_issues_incident_path(project, noteable, anchor: anchor)
changed_text = CHANGED_TEXT.fetch(was_changed, '')
- body = "edited #{changed_text}[incident timeline event](#{path})"
+ body = "edited #{changed_text}incident timeline event"
create_note(NoteSummary.new(noteable, project, author, body, action: 'timeline_event'))
end
diff --git a/app/services/terraform/states/trigger_destroy_service.rb b/app/services/terraform/states/trigger_destroy_service.rb
index 3669bdcf716..3347d429bb4 100644
--- a/app/services/terraform/states/trigger_destroy_service.rb
+++ b/app/services/terraform/states/trigger_destroy_service.rb
@@ -12,9 +12,11 @@ module Terraform
return unauthorized_response unless can_destroy_state?
return state_locked_response if state.locked?
- state.update!(deleted_at: Time.current)
+ state.run_after_commit do
+ Terraform::States::DestroyWorker.perform_async(id)
+ end
- Terraform::States::DestroyWorker.perform_async(state.id)
+ state.update!(deleted_at: Time.current)
ServiceResponse.success
end
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index 20594bec28d..4978f778870 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -34,6 +34,8 @@ module Users
return unless lease.try_obtain
@user.update_attribute(:last_activity_on, today)
+
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event('unique_active_user', values: @user.id)
end
end
end
diff --git a/app/services/web_hook_service.rb b/app/services/web_hook_service.rb
index f2f94563e56..cd2c7402713 100644
--- a/app/services/web_hook_service.rb
+++ b/app/services/web_hook_service.rb
@@ -48,7 +48,6 @@ class WebHookService
@force = force
@request_options = {
timeout: Gitlab.config.gitlab.webhook_timeout,
- use_read_total_timeout: true,
allow_local_requests: hook.allow_local_requests?
}
end
@@ -70,7 +69,7 @@ class WebHookService
start_time = Gitlab::Metrics::System.monotonic_time
response = if parsed_url.userinfo.blank?
- make_request(hook.url)
+ make_request(parsed_url.to_s)
else
make_request_with_auth
end
@@ -88,17 +87,19 @@ class WebHookService
rescue *Gitlab::HTTP::HTTP_ERRORS,
Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError => e
execution_duration = Gitlab::Metrics::System.monotonic_time - start_time
+ error_message = e.to_s
+
log_execution(
response: InternalErrorResponse.new,
execution_duration: execution_duration,
- error_message: e.to_s
+ error_message: error_message
)
Gitlab::AppLogger.error("WebHook Error after #{execution_duration.to_i.seconds}s => #{e}")
{
status: :error,
- message: e.to_s
+ message: error_message
}
end
@@ -118,7 +119,11 @@ class WebHookService
private
def parsed_url
- @parsed_url ||= URI.parse(hook.url)
+ @parsed_url ||= URI.parse(hook.interpolated_url)
+ rescue WebHook::InterpolationError => e
+ # Behavior-preserving fallback.
+ Gitlab::ErrorTracking.track_exception(e)
+ @parsed_url = URI.parse(hook.url)
end
def make_request(url, basic_auth = false)
@@ -131,7 +136,7 @@ class WebHookService
end
def make_request_with_auth
- post_url = hook.url.gsub("#{parsed_url.userinfo}@", '')
+ post_url = parsed_url.to_s.gsub("#{parsed_url.userinfo}@", '')
basic_auth = {
username: CGI.unescape(parsed_url.user),
password: CGI.unescape(parsed_url.password.presence || '')
diff --git a/app/services/web_hooks/destroy_service.rb b/app/services/web_hooks/destroy_service.rb
index ecb530f0d2a..54c6c7ea71b 100644
--- a/app/services/web_hooks/destroy_service.rb
+++ b/app/services/web_hooks/destroy_service.rb
@@ -21,8 +21,5 @@ module WebHooks
ServiceResponse.error(message: "Unable to destroy #{web_hook.model_name.human}")
end
end
-
- # Backwards compatibility with WebHooks::DestroyWorker
- alias_method :sync_destroy, :execute
end
end
diff --git a/app/services/web_hooks/log_execution_service.rb b/app/services/web_hooks/log_execution_service.rb
index 0ee7c41469f..17dcf615830 100644
--- a/app/services/web_hooks/log_execution_service.rb
+++ b/app/services/web_hooks/log_execution_service.rb
@@ -44,6 +44,7 @@ module WebHooks
end
log_state_change
+ hook.update_last_failure
end
rescue Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError
raise if raise_lock_error?
diff --git a/app/services/work_items/create_and_link_service.rb b/app/services/work_items/create_and_link_service.rb
index 534d220a846..6a773a84225 100644
--- a/app/services/work_items/create_and_link_service.rb
+++ b/app/services/work_items/create_and_link_service.rb
@@ -25,7 +25,11 @@ module WorkItems
work_item = create_result[:work_item]
return ::ServiceResponse.success(payload: payload(work_item)) if @link_params.blank?
- result = IssueLinks::CreateService.new(work_item, @current_user, @link_params).execute
+ result = WorkItems::ParentLinks::CreateService.new(
+ @link_params[:parent_work_item],
+ @current_user,
+ { target_issuable: work_item }
+ ).execute
if result[:status] == :success
::ServiceResponse.success(payload: payload(work_item))
diff --git a/app/services/work_items/create_from_task_service.rb b/app/services/work_items/create_from_task_service.rb
index 4203c96e676..ef1d47c560d 100644
--- a/app/services/work_items/create_from_task_service.rb
+++ b/app/services/work_items/create_from_task_service.rb
@@ -17,7 +17,7 @@ module WorkItems
current_user: @current_user,
params: @work_item_params.slice(:title, :work_item_type_id),
spam_params: @spam_params,
- link_params: { target_issuable: @work_item }
+ link_params: { parent_work_item: @work_item }
).execute
if create_and_link_result.error?
@@ -27,6 +27,7 @@ module WorkItems
replacement_result = TaskListReferenceReplacementService.new(
work_item: @work_item,
+ current_user: @current_user,
work_item_reference: create_and_link_result[:work_item].to_reference,
line_number_start: @work_item_params[:line_number_start],
line_number_end: @work_item_params[:line_number_end],
diff --git a/app/services/work_items/create_service.rb b/app/services/work_items/create_service.rb
index 705735fe403..c2ceb701a2f 100644
--- a/app/services/work_items/create_service.rb
+++ b/app/services/work_items/create_service.rb
@@ -1,19 +1,19 @@
# frozen_string_literal: true
module WorkItems
- class CreateService
+ class CreateService < Issues::CreateService
include ::Services::ReturnServiceResponses
+ include WidgetableService
- def initialize(project:, current_user: nil, params: {}, spam_params:)
- @create_service = ::Issues::CreateService.new(
+ def initialize(project:, current_user: nil, params: {}, spam_params:, widget_params: {})
+ super(
project: project,
current_user: current_user,
params: params,
spam_params: spam_params,
build_service: ::WorkItems::BuildService.new(project: project, current_user: current_user, params: params)
)
- @current_user = current_user
- @project = project
+ @widget_params = widget_params
end
def execute
@@ -21,13 +21,24 @@ module WorkItems
return error(_('Operation not allowed'), :forbidden)
end
- work_item = @create_service.execute
+ work_item = super
if work_item.valid?
success(payload(work_item))
else
error(work_item.errors.full_messages, :unprocessable_entity, pass_back: payload(work_item))
end
+ rescue ::WorkItems::Widgets::BaseService::WidgetError => e
+ error(e.message, :unprocessable_entity)
+ end
+
+ def transaction_create(work_item)
+ super.tap do |save_result|
+ if save_result
+ execute_widgets(work_item: work_item, callback: :after_create_in_transaction,
+ widget_params: @widget_params)
+ end
+ end
end
private
diff --git a/app/services/work_items/parent_links/create_service.rb b/app/services/work_items/parent_links/create_service.rb
new file mode 100644
index 00000000000..9940776e367
--- /dev/null
+++ b/app/services/work_items/parent_links/create_service.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module ParentLinks
+ class CreateService < IssuableLinks::CreateService
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def relate_issuables(work_item)
+ link = WorkItems::ParentLink.find_or_initialize_by(work_item: work_item)
+ link.work_item_parent = issuable
+
+ if link.changed? && link.save
+ create_notes(work_item)
+ end
+
+ link
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def linkable_issuables(work_items)
+ @linkable_issuables ||= begin
+ return [] unless can?(current_user, :admin_parent_link, issuable)
+
+ work_items.select do |work_item|
+ linkable?(work_item)
+ end
+ end
+ end
+
+ def linkable?(work_item)
+ can?(current_user, :admin_parent_link, work_item) &&
+ !previous_related_issuables.include?(work_item)
+ end
+
+ def previous_related_issuables
+ @related_issues ||= issuable.work_item_children.to_a
+ end
+
+ def extract_references
+ params[:issuable_references]
+ end
+
+ # TODO: Create system notes when work item's parent or children are updated
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/362213
+ def create_notes(work_item)
+ # no-op
+ end
+
+ def target_issuable_type
+ issuable.issue_type == 'issue' ? 'task' : issuable.issue_type
+ end
+
+ def issuables_not_found_message
+ _('No matching %{issuable} found. Make sure that you are adding a valid %{issuable} ID.' %
+ { issuable: target_issuable_type })
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/parent_links/destroy_service.rb b/app/services/work_items/parent_links/destroy_service.rb
new file mode 100644
index 00000000000..55870d44db9
--- /dev/null
+++ b/app/services/work_items/parent_links/destroy_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module ParentLinks
+ class DestroyService < IssuableLinks::DestroyService
+ attr_reader :link, :current_user, :parent, :child
+
+ def initialize(link, user)
+ @link = link
+ @current_user = user
+ @parent = link.work_item_parent
+ @child = link.work_item
+ end
+
+ private
+
+ # TODO: Create system notes when work item's parent or children are removed
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/362213
+ def create_notes
+ # no-op
+ end
+
+ def not_found_message
+ _('No Work Item Link found')
+ end
+
+ def permission_to_remove_relation?
+ can?(current_user, :admin_parent_link, child) && can?(current_user, :admin_parent_link, parent)
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/task_list_reference_removal_service.rb b/app/services/work_items/task_list_reference_removal_service.rb
index e7ec73a96e0..9152580bef0 100644
--- a/app/services/work_items/task_list_reference_removal_service.rb
+++ b/app/services/work_items/task_list_reference_removal_service.rb
@@ -11,6 +11,7 @@ module WorkItems
@line_number_end = line_number_end
@lock_version = lock_version
@current_user = current_user
+ @task_reference = /#{Regexp.escape(@task.to_reference)}(?!\d)\+/
end
def execute
@@ -26,7 +27,9 @@ module WorkItems
line_matches_reference = (@line_number_start..@line_number_end).any? do |line_number|
markdown_line = source_lines[line_number - 1]
- /#{Regexp.escape(@task.to_reference)}(?!\d)/.match?(markdown_line)
+ if @task_reference.match?(markdown_line)
+ markdown_line.sub!(@task_reference, @task.title)
+ end
end
unless line_matches_reference
@@ -35,8 +38,6 @@ module WorkItems
)
end
- remove_task_lines!(source_lines)
-
::WorkItems::UpdateService.new(
project: @work_item.project,
current_user: @current_user,
@@ -51,13 +52,5 @@ module WorkItems
rescue ActiveRecord::StaleObjectError
::ServiceResponse.error(message: STALE_OBJECT_MESSAGE)
end
-
- private
-
- def remove_task_lines!(source_lines)
- source_lines.delete_if.each_with_index do |_line, index|
- index >= @line_number_start - 1 && index < @line_number_end
- end
- end
end
end
diff --git a/app/services/work_items/task_list_reference_replacement_service.rb b/app/services/work_items/task_list_reference_replacement_service.rb
index 1044a4feb88..b098d67561b 100644
--- a/app/services/work_items/task_list_reference_replacement_service.rb
+++ b/app/services/work_items/task_list_reference_replacement_service.rb
@@ -4,8 +4,9 @@ module WorkItems
class TaskListReferenceReplacementService
STALE_OBJECT_MESSAGE = 'Stale work item. Check lock version'
- def initialize(work_item:, work_item_reference:, line_number_start:, line_number_end:, title:, lock_version:)
+ def initialize(work_item:, current_user:, work_item_reference:, line_number_start:, line_number_end:, title:, lock_version:)
@work_item = work_item
+ @current_user = current_user
@work_item_reference = work_item_reference
@line_number_start = line_number_start
@line_number_end = line_number_end
@@ -32,7 +33,11 @@ module WorkItems
source_lines[@line_number_start - 1] = markdown_task_first_line
remove_additional_lines!(source_lines)
- @work_item.update!(description: source_lines.join("\n"))
+ ::WorkItems::UpdateService.new(
+ project: @work_item.project,
+ current_user: @current_user,
+ params: { description: source_lines.join("\n"), lock_version: @lock_version }
+ ).execute(@work_item)
::ServiceResponse.success
rescue ActiveRecord::StaleObjectError
diff --git a/app/services/work_items/update_service.rb b/app/services/work_items/update_service.rb
index 0b420881b4b..98818fda263 100644
--- a/app/services/work_items/update_service.rb
+++ b/app/services/work_items/update_service.rb
@@ -2,16 +2,38 @@
module WorkItems
class UpdateService < ::Issues::UpdateService
+ include WidgetableService
+
def initialize(project:, current_user: nil, params: {}, spam_params: nil, widget_params: {})
+ params[:widget_params] = true if widget_params.present?
+
super(project: project, current_user: current_user, params: params, spam_params: nil)
@widget_params = widget_params
end
+ def execute(work_item)
+ updated_work_item = super
+
+ if updated_work_item.valid?
+ success(payload(work_item))
+ else
+ error(updated_work_item.errors.full_messages, :unprocessable_entity, pass_back: payload(updated_work_item))
+ end
+ rescue ::WorkItems::Widgets::BaseService::WidgetError => e
+ error(e.message, :unprocessable_entity)
+ end
+
private
def update(work_item)
- execute_widgets(work_item: work_item, callback: :update)
+ execute_widgets(work_item: work_item, callback: :update, widget_params: @widget_params)
+
+ super
+ end
+
+ def transaction_update(work_item, opts = {})
+ execute_widgets(work_item: work_item, callback: :before_update_in_transaction, widget_params: @widget_params)
super
end
@@ -22,10 +44,8 @@ module WorkItems
GraphqlTriggers.issuable_title_updated(work_item) if work_item.previous_changes.key?(:title)
end
- def execute_widgets(work_item:, callback:)
- work_item.widgets.each do |widget|
- widget.try(callback, params: @widget_params[widget.class.api_symbol])
- end
+ def payload(work_item)
+ { work_item: work_item }
end
end
end
diff --git a/app/services/work_items/widgets/base_service.rb b/app/services/work_items/widgets/base_service.rb
new file mode 100644
index 00000000000..037733bbed5
--- /dev/null
+++ b/app/services/work_items/widgets/base_service.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ class BaseService < ::BaseService
+ WidgetError = Class.new(StandardError)
+
+ attr_reader :widget, :current_user
+
+ def initialize(widget:, current_user:)
+ @widget = widget
+ @current_user = current_user
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/description_service/update_service.rb b/app/services/work_items/widgets/description_service/update_service.rb
new file mode 100644
index 00000000000..e63b6b2ee6c
--- /dev/null
+++ b/app/services/work_items/widgets/description_service/update_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module DescriptionService
+ class UpdateService < WorkItems::Widgets::BaseService
+ def update(params: {})
+ return unless params.present? && params[:description]
+
+ widget.work_item.description = params[:description]
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/hierarchy_service/base_service.rb b/app/services/work_items/widgets/hierarchy_service/base_service.rb
new file mode 100644
index 00000000000..085d6c6b0e7
--- /dev/null
+++ b/app/services/work_items/widgets/hierarchy_service/base_service.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module HierarchyService
+ class BaseService < WorkItems::Widgets::BaseService
+ private
+
+ def handle_hierarchy_changes(params)
+ return feature_flag_error unless feature_flag_enabled?
+ return incompatible_args_error if incompatible_args?(params)
+
+ if params.key?(:parent)
+ update_work_item_parent(params.delete(:parent))
+ elsif params.key?(:children)
+ update_work_item_children(params.delete(:children))
+ else
+ invalid_args_error
+ end
+ end
+
+ def update_work_item_parent(parent)
+ if parent.nil?
+ remove_parent
+ else
+ set_parent(parent)
+ end
+ end
+
+ def set_parent(parent)
+ ::WorkItems::ParentLinks::CreateService
+ .new(parent, current_user, { target_issuable: widget.work_item })
+ .execute
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def remove_parent
+ link = ::WorkItems::ParentLink.find_by(work_item: widget.work_item)
+ return success unless link.present?
+
+ ::WorkItems::ParentLinks::DestroyService.new(link, current_user).execute
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def update_work_item_children(children)
+ ::WorkItems::ParentLinks::CreateService
+ .new(widget.work_item, current_user, { issuable_references: children })
+ .execute
+ end
+
+ def feature_flag_enabled?
+ Feature.enabled?(:work_items_hierarchy, widget.work_item&.project)
+ end
+
+ def incompatible_args?(params)
+ params[:children] && params[:parent]
+ end
+
+ def feature_flag_error
+ error(_('`work_items_hierarchy` feature flag disabled for this project'))
+ end
+
+ def incompatible_args_error
+ error(_('A Work Item can be a parent or a child, but not both.'))
+ end
+
+ def invalid_args_error
+ error(_("One or more arguments are invalid: %{args}." % { args: params.keys.to_sentence } ))
+ end
+
+ def service_response!(result)
+ return result unless result[:status] == :error
+
+ raise WidgetError, result[:message]
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/hierarchy_service/create_service.rb b/app/services/work_items/widgets/hierarchy_service/create_service.rb
new file mode 100644
index 00000000000..c97812fade2
--- /dev/null
+++ b/app/services/work_items/widgets/hierarchy_service/create_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module HierarchyService
+ class CreateService < WorkItems::Widgets::HierarchyService::BaseService
+ def after_create_in_transaction(params:)
+ return unless params.present?
+
+ service_response!(handle_hierarchy_changes(params))
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/hierarchy_service/update_service.rb b/app/services/work_items/widgets/hierarchy_service/update_service.rb
new file mode 100644
index 00000000000..48b540f919e
--- /dev/null
+++ b/app/services/work_items/widgets/hierarchy_service/update_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module HierarchyService
+ class UpdateService < WorkItems::Widgets::HierarchyService::BaseService
+ def before_update_in_transaction(params:)
+ return unless params.present?
+
+ service_response!(handle_hierarchy_changes(params))
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/weight_service/update_service.rb b/app/services/work_items/widgets/weight_service/update_service.rb
new file mode 100644
index 00000000000..cd62a25358f
--- /dev/null
+++ b/app/services/work_items/widgets/weight_service/update_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module WeightService
+ class UpdateService < WorkItems::Widgets::BaseService
+ def update(params: {})
+ return unless params.present? && params[:weight]
+
+ widget.work_item.weight = params[:weight]
+ end
+ end
+ end
+ end
+end