
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 14:10:13 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 14:10:13 +0300
commit    0ea3fcec397b69815975647f5e2aa5fe944a8486 (patch)
tree      7979381b89d26011bcf9bdc989a40fcc2f1ed4ff /app/services
parent    72123183a20411a36d607d70b12d57c484394c8e (diff)

Add latest changes from gitlab-org/gitlab@15-1-stable-ee (tag: v15.1.0-rc42)
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/auto_merge/base_service.rb | 5
-rw-r--r--  app/services/boards/base_items_list_service.rb | 7
-rw-r--r--  app/services/boards/issues/list_service.rb | 2
-rw-r--r--  app/services/bulk_create_integration_service.rb | 14
-rw-r--r--  app/services/bulk_imports/create_pipeline_trackers_service.rb | 68
-rw-r--r--  app/services/bulk_imports/file_export_service.rb | 4
-rw-r--r--  app/services/bulk_imports/lfs_objects_export_service.rb | 2
-rw-r--r--  app/services/bulk_imports/repository_bundle_export_service.rb | 23
-rw-r--r--  app/services/bulk_update_integration_service.rb | 17
-rw-r--r--  app/services/ci/job_artifacts/destroy_all_expired_service.rb | 2
-rw-r--r--  app/services/ci/job_artifacts/destroy_batch_service.rb | 37
-rw-r--r--  app/services/ci/pipeline_artifacts/coverage_report_service.rb | 38
-rw-r--r--  app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb | 2
-rw-r--r--  app/services/ci/runners/reset_registration_token_service.rb | 7
-rw-r--r--  app/services/clusters/applications/schedule_update_service.rb | 40
-rw-r--r--  app/services/concerns/integrations/bulk_operation_hashes.rb | 31
-rw-r--r--  app/services/concerns/members/bulk_create_users.rb | 86
-rw-r--r--  app/services/environments/stop_service.rb | 6
-rw-r--r--  app/services/event_create_service.rb | 18
-rw-r--r--  app/services/git/branch_push_service.rb | 1
-rw-r--r--  app/services/import/base_service.rb | 4
-rw-r--r--  app/services/import/bitbucket_server_service.rb | 4
-rw-r--r--  app/services/import/fogbugz_service.rb | 107
-rw-r--r--  app/services/import/github_service.rb | 4
-rw-r--r--  app/services/incident_management/timeline_events/base_service.rb | 2
-rw-r--r--  app/services/incident_management/timeline_events/create_service.rb | 5
-rw-r--r--  app/services/incident_management/timeline_events/destroy_service.rb | 1
-rw-r--r--  app/services/incident_management/timeline_events/update_service.rb | 6
-rw-r--r--  app/services/issuable/clone/base_service.rb | 7
-rw-r--r--  app/services/issuable/common_system_notes_service.rb | 2
-rw-r--r--  app/services/issues/create_service.rb | 8
-rw-r--r--  app/services/issues/move_service.rb | 1
-rw-r--r--  app/services/jira_connect_subscriptions/create_service.rb | 13
-rw-r--r--  app/services/markdown_content_rewriter_service.rb | 62
-rw-r--r--  app/services/members/approve_access_request_service.rb | 17
-rw-r--r--  app/services/members/base_service.rb | 13
-rw-r--r--  app/services/members/create_service.rb | 13
-rw-r--r--  app/services/members/creator_service.rb | 119
-rw-r--r--  app/services/members/destroy_service.rb | 11
-rw-r--r--  app/services/members/groups/bulk_creator_service.rb | 9
-rw-r--r--  app/services/members/groups/creator_service.rb | 6
-rw-r--r--  app/services/members/mailgun/process_webhook_service.rb | 39
-rw-r--r--  app/services/members/projects/bulk_creator_service.rb | 9
-rw-r--r--  app/services/members/projects/creator_service.rb | 24
-rw-r--r--  app/services/members/update_service.rb | 17
-rw-r--r--  app/services/merge_requests/base_service.rb | 22
-rw-r--r--  app/services/merge_requests/build_service.rb | 36
-rw-r--r--  app/services/merge_requests/create_pipeline_service.rb | 8
-rw-r--r--  app/services/merge_requests/merge_service.rb | 8
-rw-r--r--  app/services/merge_requests/mergeability/run_checks_service.rb | 12
-rw-r--r--  app/services/merge_requests/post_merge_service.rb | 2
-rw-r--r--  app/services/merge_requests/refresh_service.rb | 10
-rw-r--r--  app/services/merge_requests/reload_merge_head_diff_service.rb | 2
-rw-r--r--  app/services/merge_requests/update_service.rb | 12
-rw-r--r--  app/services/metrics/dashboard/base_service.rb | 1
-rw-r--r--  app/services/metrics/dashboard/panel_preview_service.rb | 1
-rw-r--r--  app/services/metrics/dashboard/system_dashboard_service.rb | 3
-rw-r--r--  app/services/note_summary.rb | 4
-rw-r--r--  app/services/notes/copy_service.rb | 17
-rw-r--r--  app/services/notification_recipients/build_service.rb | 4
-rw-r--r--  app/services/notification_recipients/builder/new_release.rb | 25
-rw-r--r--  app/services/notification_service.rb | 4
-rw-r--r--  app/services/packages/cleanup/update_policy_service.rb | 35
-rw-r--r--  app/services/packages/go/create_package_service.rb | 3
-rw-r--r--  app/services/packages/maven/metadata/append_package_file_service.rb | 4
-rw-r--r--  app/services/packages/rubygems/create_gemspec_service.rb | 4
-rw-r--r--  app/services/pages/delete_service.rb | 13
-rw-r--r--  app/services/pages_domains/create_acme_order_service.rb | 10
-rw-r--r--  app/services/projects/after_rename_service.rb | 14
-rw-r--r--  app/services/projects/destroy_rollback_service.rb | 31
-rw-r--r--  app/services/projects/destroy_service.rb | 19
-rw-r--r--  app/services/projects/import_export/export_service.rb | 7
-rw-r--r--  app/services/projects/open_issues_count_service.rb | 6
-rw-r--r--  app/services/projects/operations/update_service.rb | 2
-rw-r--r--  app/services/projects/transfer_service.rb | 8
-rw-r--r--  app/services/projects/update_pages_service.rb | 8
-rw-r--r--  app/services/releases/base_service.rb | 4
-rw-r--r--  app/services/releases/create_service.rb | 2
-rw-r--r--  app/services/repositories/base_service.rb | 12
-rw-r--r--  app/services/repositories/changelog_service.rb | 32
-rw-r--r--  app/services/repositories/destroy_rollback_service.rb | 25
-rw-r--r--  app/services/repositories/destroy_service.rb | 36
-rw-r--r--  app/services/repositories/shell_destroy_service.rb | 15
-rw-r--r--  app/services/resource_access_tokens/create_service.rb | 10
-rw-r--r--  app/services/resource_events/base_change_timebox_service.rb | 2
-rw-r--r--  app/services/resource_events/base_synthetic_notes_builder_service.rb | 9
-rw-r--r--  app/services/service_ping/submit_service.rb | 11
-rw-r--r--  app/services/service_response.rb | 18
-rw-r--r--  app/services/snippets/bulk_destroy_service.rb | 14
-rw-r--r--  app/services/snippets/destroy_service.rb | 5
-rw-r--r--  app/services/static_site_editor/config_service.rb | 85
-rw-r--r--  app/services/system_notes/issuables_service.rb | 15
-rw-r--r--  app/services/system_notes/merge_requests_service.rb | 2
-rw-r--r--  app/services/terraform/remote_state_handler.rb | 14
-rw-r--r--  app/services/terraform/states/destroy_service.rb | 34
-rw-r--r--  app/services/terraform/states/trigger_destroy_service.rb | 43
-rw-r--r--  app/services/two_factor/destroy_service.rb | 8
-rw-r--r--  app/services/user_project_access_changed_service.rb | 6
-rw-r--r--  app/services/web_hook_service.rb | 85
-rw-r--r--  app/services/web_hooks/destroy_service.rb | 70
-rw-r--r--  app/services/web_hooks/log_destroy_service.rb | 19
-rw-r--r--  app/services/work_items/update_service.rb | 22
102 files changed, 1136 insertions(+), 710 deletions(-)
diff --git a/app/services/auto_merge/base_service.rb b/app/services/auto_merge/base_service.rb
index 4ed4368d3b7..2a32f0c74ac 100644
--- a/app/services/auto_merge/base_service.rb
+++ b/app/services/auto_merge/base_service.rb
@@ -55,7 +55,10 @@ module AutoMerge
def available_for?(merge_request)
strong_memoize("available_for_#{merge_request.id}") do
merge_request.can_be_merged_by?(current_user) &&
- merge_request.mergeable_state?(skip_ci_check: true) &&
+ merge_request.open? &&
+ !merge_request.broken? &&
+ !merge_request.draft? &&
+ merge_request.mergeable_discussions_state? &&
yield
end
end
diff --git a/app/services/boards/base_items_list_service.rb b/app/services/boards/base_items_list_service.rb
index 01fad14d036..2a9cbb83cc4 100644
--- a/app/services/boards/base_items_list_service.rb
+++ b/app/services/boards/base_items_list_service.rb
@@ -78,12 +78,15 @@ module Boards
end
def list
- return unless params.key?(:id)
+ return unless params.key?(:id) || params.key?(:list)
strong_memoize(:list) do
id = params[:id]
+ list = params[:list]
- if board.lists.loaded?
+ if list.present?
+ list
+ elsif board.lists.loaded?
board.lists.find { |l| l.id == id }
else
board.lists.find(id)
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 6021d634f86..465025ef2e9 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -20,7 +20,7 @@ module Boards
private
def order(items)
- return items.order_closed_date_desc if list&.closed?
+ return items.order_closed_at_desc if list&.closed?
items.order_by_relative_position
end
diff --git a/app/services/bulk_create_integration_service.rb b/app/services/bulk_create_integration_service.rb
index 3a214122ed3..8fbb7f4f347 100644
--- a/app/services/bulk_create_integration_service.rb
+++ b/app/services/bulk_create_integration_service.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class BulkCreateIntegrationService
+ include Integrations::BulkOperationHashes
+
def initialize(integration, batch, association)
@integration = integration
@batch = batch
@@ -8,13 +10,13 @@ class BulkCreateIntegrationService
end
def execute
- service_list = ServiceList.new(batch, integration_hash, association).to_array
+ service_list = ServiceList.new(batch, integration_hash(:create), association).to_array
Integration.transaction do
results = bulk_insert(*service_list)
if integration.data_fields_present?
- data_list = DataList.new(results, data_fields_hash, integration.data_fields.class).to_array
+ data_list = DataList.new(results, data_fields_hash(:create), integration.data_fields.class).to_array
bulk_insert(*data_list)
end
@@ -30,12 +32,4 @@ class BulkCreateIntegrationService
klass.insert_all(items_to_insert, returning: [:id])
end
-
- def integration_hash
- integration.to_integration_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
- end
-
- def data_fields_hash
- integration.to_data_fields_hash
- end
end
diff --git a/app/services/bulk_imports/create_pipeline_trackers_service.rb b/app/services/bulk_imports/create_pipeline_trackers_service.rb
new file mode 100644
index 00000000000..5c9c68e62b5
--- /dev/null
+++ b/app/services/bulk_imports/create_pipeline_trackers_service.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class CreatePipelineTrackersService
+ def initialize(entity)
+ @entity = entity
+ end
+
+ def execute!
+ entity.class.transaction do
+ entity.pipelines.each do |pipeline|
+ status = skip_pipeline?(pipeline) ? -2 : 0
+
+ entity.trackers.create!(
+ stage: pipeline[:stage],
+ pipeline_name: pipeline[:pipeline],
+ status: status
+ )
+ end
+ end
+ end
+
+ private
+
+ attr_reader :entity
+
+ def skip_pipeline?(pipeline)
+ return false unless source_version.valid?
+
+ minimum_version, maximum_version = pipeline.values_at(:minimum_source_version, :maximum_source_version)
+
+ if minimum_version && non_patch_source_version < Gitlab::VersionInfo.parse(minimum_version)
+ log_skipped_pipeline(pipeline, minimum_version, maximum_version)
+ return true
+ end
+
+ if maximum_version && non_patch_source_version > Gitlab::VersionInfo.parse(maximum_version)
+ log_skipped_pipeline(pipeline, minimum_version, maximum_version)
+ return true
+ end
+
+ false
+ end
+
+ def source_version
+ @source_version ||= entity.bulk_import.source_version_info
+ end
+
+ def non_patch_source_version
+ Gitlab::VersionInfo.new(source_version.major, source_version.minor, 0)
+ end
+
+ def log_skipped_pipeline(pipeline, minimum_version, maximum_version)
+ logger.info(
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ entity_id: entity.id,
+ pipeline_name: pipeline[:pipeline],
+ minimum_source_version: minimum_version,
+ maximum_source_version: maximum_version,
+ source_version: source_version.to_s
+ )
+ end
+
+ def logger
+ @logger ||= Gitlab::Import::Logger.build
+ end
+ end
+end
diff --git a/app/services/bulk_imports/file_export_service.rb b/app/services/bulk_imports/file_export_service.rb
index a9d06d84277..b2d114368a1 100644
--- a/app/services/bulk_imports/file_export_service.rb
+++ b/app/services/bulk_imports/file_export_service.rb
@@ -30,6 +30,10 @@ module BulkImports
UploadsExportService.new(portable, export_path)
when FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION
LfsObjectsExportService.new(portable, export_path)
+ when FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION
+ RepositoryBundleExportService.new(portable.repository, export_path, relation)
+ when FileTransfer::ProjectConfig::DESIGN_BUNDLE_RELATION
+ RepositoryBundleExportService.new(portable.design_repository, export_path, relation)
else
raise BulkImports::Error, 'Unsupported relation export type'
end
diff --git a/app/services/bulk_imports/lfs_objects_export_service.rb b/app/services/bulk_imports/lfs_objects_export_service.rb
index fa606e4e5a3..1f745201c8a 100644
--- a/app/services/bulk_imports/lfs_objects_export_service.rb
+++ b/app/services/bulk_imports/lfs_objects_export_service.rb
@@ -32,6 +32,8 @@ module BulkImports
destination_filepath = File.join(export_path, lfs_object.oid)
if lfs_object.local_store?
+ return unless File.exist?(lfs_object.file.path)
+
copy_files(lfs_object.file.path, destination_filepath)
else
download(lfs_object.file.url, destination_filepath)
diff --git a/app/services/bulk_imports/repository_bundle_export_service.rb b/app/services/bulk_imports/repository_bundle_export_service.rb
new file mode 100644
index 00000000000..31a2ed6d1af
--- /dev/null
+++ b/app/services/bulk_imports/repository_bundle_export_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class RepositoryBundleExportService
+ def initialize(repository, export_path, export_filename)
+ @repository = repository
+ @export_path = export_path
+ @export_filename = export_filename
+ end
+
+ def execute
+ repository.bundle_to_disk(bundle_filepath) if repository.exists?
+ end
+
+ private
+
+ attr_reader :repository, :export_path, :export_filename
+
+ def bundle_filepath
+ File.join(export_path, "#{export_filename}.bundle")
+ end
+ end
+end
diff --git a/app/services/bulk_update_integration_service.rb b/app/services/bulk_update_integration_service.rb
index 29c4d0cc220..57ceec57962 100644
--- a/app/services/bulk_update_integration_service.rb
+++ b/app/services/bulk_update_integration_service.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class BulkUpdateIntegrationService
+ include Integrations::BulkOperationHashes
+
def initialize(integration, batch)
@integration = integration
@batch = batch
@@ -9,10 +11,13 @@ class BulkUpdateIntegrationService
# rubocop: disable CodeReuse/ActiveRecord
def execute
Integration.transaction do
- Integration.where(id: batch_ids).update_all(integration_hash)
+ Integration.where(id: batch_ids).update_all(integration_hash(:update))
if integration.data_fields_present?
- integration.data_fields.class.where(data_fields_foreign_key => batch_ids).update_all(data_fields_hash)
+ integration.data_fields.class.where(data_fields_foreign_key => batch_ids)
+ .update_all(
+ data_fields_hash(:update)
+ )
end
end
end
@@ -27,14 +32,6 @@ class BulkUpdateIntegrationService
integration.data_fields.class.reflections['integration'].foreign_key
end
- def integration_hash
- integration.to_integration_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
- end
-
- def data_fields_hash
- integration.to_data_fields_hash
- end
-
def batch_ids
@batch_ids ||=
if batch.is_a?(ActiveRecord::Relation)
diff --git a/app/services/ci/job_artifacts/destroy_all_expired_service.rb b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
index 4070875ffe1..b5dd5b843c6 100644
--- a/app/services/ci/job_artifacts/destroy_all_expired_service.rb
+++ b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
@@ -60,7 +60,7 @@ module Ci
end
def destroy_batch(artifacts)
- Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
+ Ci::JobArtifacts::DestroyBatchService.new(artifacts, skip_projects_on_refresh: true).execute
end
def loop_timeout?
diff --git a/app/services/ci/job_artifacts/destroy_batch_service.rb b/app/services/ci/job_artifacts/destroy_batch_service.rb
index 5121a8b0a8b..49b65f13804 100644
--- a/app/services/ci/job_artifacts/destroy_batch_service.rb
+++ b/app/services/ci/job_artifacts/destroy_batch_service.rb
@@ -17,14 +17,21 @@ module Ci
# +pick_up_at+:: When to pick up for deletion of files
# Returns:
# +Hash+:: A hash with status and destroyed_artifacts_count keys
- def initialize(job_artifacts, pick_up_at: nil, fix_expire_at: fix_expire_at?)
+ def initialize(job_artifacts, pick_up_at: nil, fix_expire_at: fix_expire_at?, skip_projects_on_refresh: false)
@job_artifacts = job_artifacts.with_destroy_preloads.to_a
@pick_up_at = pick_up_at
@fix_expire_at = fix_expire_at
+ @skip_projects_on_refresh = skip_projects_on_refresh
end
# rubocop: disable CodeReuse/ActiveRecord
def execute(update_stats: true)
+ if @skip_projects_on_refresh
+ exclude_artifacts_undergoing_stats_refresh
+ else
+ track_artifacts_undergoing_stats_refresh
+ end
+
# Detect and fix artifacts that had `expire_at` wrongly backfilled by migration
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47723
detect_and_fix_wrongly_expired_artifacts
@@ -154,6 +161,34 @@ module Ci
Gitlab::AppLogger.info(message: "Fixed expire_at from artifacts.", fixed_artifacts_expire_at_count: artifacts.count)
end
+
+ def track_artifacts_undergoing_stats_refresh
+ project_ids = @job_artifacts.find_all do |artifact|
+ artifact.project.refreshing_build_artifacts_size?
+ end.map(&:project_id).uniq
+
+ project_ids.each do |project_id|
+ Gitlab::ProjectStatsRefreshConflictsLogger.warn_artifact_deletion_during_stats_refresh(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_id: project_id
+ )
+ end
+ end
+
+ def exclude_artifacts_undergoing_stats_refresh
+ project_ids = Set.new
+
+ @job_artifacts.reject! do |artifact|
+ next unless artifact.project.refreshing_build_artifacts_size?
+
+ project_ids << artifact.project_id
+ end
+
+ Gitlab::ProjectStatsRefreshConflictsLogger.warn_skipped_artifact_deletion_during_stats_refresh(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_ids: project_ids
+ )
+ end
end
end
end
diff --git a/app/services/ci/pipeline_artifacts/coverage_report_service.rb b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
index 8209639fa22..b0acb1d5a0b 100644
--- a/app/services/ci/pipeline_artifacts/coverage_report_service.rb
+++ b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
@@ -2,30 +2,44 @@
module Ci
module PipelineArtifacts
class CoverageReportService
- def execute(pipeline)
- return unless pipeline.can_generate_coverage_reports?
- return if pipeline.has_coverage_reports?
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ end
- file = build_carrierwave_file(pipeline)
+ def execute
+ return if pipeline.has_coverage_reports?
+ return if report.empty?
pipeline.pipeline_artifacts.create!(
project_id: pipeline.project_id,
file_type: :code_coverage,
file_format: Ci::PipelineArtifact::REPORT_TYPES.fetch(:code_coverage),
- size: file["tempfile"].size,
- file: file,
+ size: carrierwave_file["tempfile"].size,
+ file: carrierwave_file,
expire_at: Ci::PipelineArtifact::EXPIRATION_DATE.from_now
)
end
private
- def build_carrierwave_file(pipeline)
- CarrierWaveStringFile.new_file(
- file_content: pipeline.coverage_reports.to_json,
- filename: Ci::PipelineArtifact::DEFAULT_FILE_NAMES.fetch(:code_coverage),
- content_type: 'application/json'
- )
+ attr_reader :pipeline
+
+ def report
+ strong_memoize(:report) do
+ Gitlab::Ci::Reports::CoverageReportGenerator.new(pipeline).report
+ end
+ end
+
+ def carrierwave_file
+ strong_memoize(:carrier_wave_file) do
+ CarrierWaveStringFile.new_file(
+ file_content: report.to_json,
+ filename: Ci::PipelineArtifact::DEFAULT_FILE_NAMES.fetch(:code_coverage),
+ content_type: 'application/json'
+ )
+ end
end
end
end
diff --git a/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb b/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
index 7b6590a117c..17c039885e5 100644
--- a/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
+++ b/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
@@ -25,7 +25,7 @@ module Ci
private
def destroy_artifacts_batch
- artifacts = ::Ci::PipelineArtifact.unlocked.expired(BATCH_SIZE).to_a
+ artifacts = ::Ci::PipelineArtifact.unlocked.expired.limit(BATCH_SIZE).to_a
return false if artifacts.empty?
artifacts.each(&:destroy!)
diff --git a/app/services/ci/runners/reset_registration_token_service.rb b/app/services/ci/runners/reset_registration_token_service.rb
index 2a3fb08c5e1..81a70a771cf 100644
--- a/app/services/ci/runners/reset_registration_token_service.rb
+++ b/app/services/ci/runners/reset_registration_token_service.rb
@@ -13,11 +13,10 @@ module Ci
def execute
return unless @user.present? && @user.can?(:update_runners_registration_token, scope)
- case scope
- when ::ApplicationSetting
+ if scope.respond_to?(:runners_registration_token)
scope.reset_runners_registration_token!
- ApplicationSetting.current_without_cache.runners_registration_token
- when ::Group, ::Project
+ scope.runners_registration_token
+ else
scope.reset_runners_token!
scope.runners_token
end
diff --git a/app/services/clusters/applications/schedule_update_service.rb b/app/services/clusters/applications/schedule_update_service.rb
deleted file mode 100644
index 4fabf1d809e..00000000000
--- a/app/services/clusters/applications/schedule_update_service.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-# DEPRECATED: To be removed as part of https://gitlab.com/groups/gitlab-org/-/epics/5877
-module Clusters
- module Applications
- class ScheduleUpdateService
- BACKOFF_DELAY = 2.minutes
-
- attr_accessor :application, :project
-
- def initialize(cluster_prometheus_adapter, project)
- @application = cluster_prometheus_adapter&.cluster&.application_prometheus
- @project = project
- end
-
- def execute
- return unless application
- return if application.externally_installed?
-
- if recently_scheduled?
- worker_class.perform_in(BACKOFF_DELAY, application.name, application.id, project.id, Time.current)
- else
- worker_class.perform_async(application.name, application.id, project.id, Time.current)
- end
- end
-
- private
-
- def worker_class
- ::ClusterUpdateAppWorker
- end
-
- def recently_scheduled?
- return false unless application.last_update_started_at
-
- application.last_update_started_at.utc >= Time.current.utc - BACKOFF_DELAY
- end
- end
- end
-end
diff --git a/app/services/concerns/integrations/bulk_operation_hashes.rb b/app/services/concerns/integrations/bulk_operation_hashes.rb
new file mode 100644
index 00000000000..3f13c764ebe
--- /dev/null
+++ b/app/services/concerns/integrations/bulk_operation_hashes.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+# Returns hashes of attributes suitable for passing to `.insert_all` or `update_all`
+module Integrations
+ module BulkOperationHashes
+ private
+
+ def integration_hash(operation)
+ integration
+ .to_database_hash
+ .merge('inherit_from_id' => integration.inherit_from_id || integration.id)
+ .merge(update_timestamps(operation))
+ end
+
+ def data_fields_hash(operation)
+ integration
+ .data_fields
+ .to_database_hash
+ .merge(update_timestamps(operation))
+ end
+
+ def update_timestamps(operation)
+ time_now = Time.current
+
+ {
+ 'created_at' => (time_now if operation == :create),
+ 'updated_at' => time_now
+ }.compact
+ end
+ end
+end
diff --git a/app/services/concerns/members/bulk_create_users.rb b/app/services/concerns/members/bulk_create_users.rb
deleted file mode 100644
index e60c84af89e..00000000000
--- a/app/services/concerns/members/bulk_create_users.rb
+++ /dev/null
@@ -1,86 +0,0 @@
-# frozen_string_literal: true
-
-module Members
- module BulkCreateUsers
- extend ActiveSupport::Concern
-
- included do
- class << self
- def add_users(source, users, access_level, current_user: nil, expires_at: nil, tasks_to_be_done: [], tasks_project_id: nil)
- return [] unless users.present?
-
- emails, users, existing_members = parse_users_list(source, users)
-
- Member.transaction do
- (emails + users).map! do |user|
- new(source,
- user,
- access_level,
- existing_members: existing_members,
- current_user: current_user,
- expires_at: expires_at,
- tasks_to_be_done: tasks_to_be_done,
- tasks_project_id: tasks_project_id)
- .execute
- end
- end
- end
-
- private
-
- def parse_users_list(source, list)
- emails = []
- user_ids = []
- users = []
- existing_members = {}
-
- list.each do |item|
- case item
- when User
- users << item
- when Integer
- user_ids << item
- when /\A\d+\Z/
- user_ids << item.to_i
- when Devise.email_regexp
- emails << item
- end
- end
-
- # the below will automatically discard invalid user_ids
- users.concat(User.id_in(user_ids)) if user_ids.present?
- users.uniq! # de-duplicate just in case as there is no controlling if user records and ids are sent multiple times
-
- users_by_emails = source.users_by_emails(emails) # preloads our request store for all emails
- # in case emails belong to a user that is being invited by user or user_id, remove them from
- # emails and let users/user_ids handle it.
- parsed_emails = emails.select do |email|
- user = users_by_emails[email]
- !user || (users.exclude?(user) && user_ids.exclude?(user.id))
- end
-
- if users.present?
- # helps not have to perform another query per user id to see if the member exists later on when fetching
- existing_members = source.members_and_requesters.with_user(users).index_by(&:user_id)
- end
-
- [parsed_emails, users, existing_members]
- end
- end
- end
-
- def initialize(source, user, access_level, **args)
- super
-
- @existing_members = args[:existing_members] || (raise ArgumentError, "existing_members must be included in the args hash")
- end
-
- private
-
- attr_reader :existing_members
-
- def find_or_initialize_member_by_user
- existing_members[user.id] || source.members.build(user_id: user.id)
- end
- end
-end
diff --git a/app/services/environments/stop_service.rb b/app/services/environments/stop_service.rb
index 54ad94947ff..75c878c9350 100644
--- a/app/services/environments/stop_service.rb
+++ b/app/services/environments/stop_service.rb
@@ -7,7 +7,11 @@ module Environments
def execute(environment)
return unless can?(current_user, :stop_environment, environment)
- environment.stop_with_actions!(current_user)
+ if params[:force]
+ environment.stop_complete!
+ else
+ environment.stop_with_actions!(current_user)
+ end
end
def execute_for_branch(branch_name)
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index 5a2c29f8e7a..2ab4bb47462 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -25,12 +25,14 @@ class EventCreateService
def open_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :created).tap do
track_event(event_action: :created, event_target: MergeRequest, author_id: current_user.id)
+ track_mr_snowplow_event(merge_request, current_user, :create)
end
end
def close_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :closed).tap do
track_event(event_action: :closed, event_target: MergeRequest, author_id: current_user.id)
+ track_mr_snowplow_event(merge_request, current_user, :close)
end
end
@@ -41,6 +43,7 @@ class EventCreateService
def merge_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :merged).tap do
track_event(event_action: :merged, event_target: MergeRequest, author_id: current_user.id)
+ track_mr_snowplow_event(merge_request, current_user, :merge)
end
end
@@ -64,6 +67,7 @@ class EventCreateService
create_record_event(note, current_user, :commented).tap do
if note.is_a?(DiffNote) && note.for_merge_request?
track_event(event_action: :commented, event_target: MergeRequest, author_id: current_user.id)
+ track_mr_snowplow_event(note, current_user, :comment)
end
end
end
@@ -225,6 +229,20 @@ class EventCreateService
def track_event(**params)
Gitlab::UsageDataCounters::TrackUniqueEvents.track_event(**params)
end
+
+ def track_mr_snowplow_event(record, current_user, action)
+ return unless Feature.enabled?(:route_hll_to_snowplow_phase2)
+
+ project = record.project
+ Gitlab::Tracking.event(
+ Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION.to_s,
+ action.to_s,
+ label: 'merge_requests_users',
+ project: project,
+ namespace: project.namespace,
+ user: current_user
+ )
+ end
end
EventCreateService.prepend_mod_with('EventCreateService')
diff --git a/app/services/git/branch_push_service.rb b/app/services/git/branch_push_service.rb
index 3c27ad56ebb..91f14251608 100644
--- a/app/services/git/branch_push_service.rb
+++ b/app/services/git/branch_push_service.rb
@@ -39,6 +39,7 @@ module Git
def enqueue_update_mrs
return if params[:merge_request_branches]&.exclude?(branch_name)
+ # TODO: pass params[:push_options] to worker
UpdateMergeRequestsWorker.perform_async(
project.id,
current_user.id,
diff --git a/app/services/import/base_service.rb b/app/services/import/base_service.rb
index 4a43b2f7425..ab3e9c7abba 100644
--- a/app/services/import/base_service.rb
+++ b/app/services/import/base_service.rb
@@ -8,6 +8,10 @@ module Import
@params = params
end
+ def authorized?
+ can?(current_user, :create_projects, target_namespace)
+ end
+
private
def find_or_create_namespace(namespace, owner)
diff --git a/app/services/import/bitbucket_server_service.rb b/app/services/import/bitbucket_server_service.rb
index d1c22f06464..20f6c987c92 100644
--- a/app/services/import/bitbucket_server_service.rb
+++ b/app/services/import/bitbucket_server_service.rb
@@ -72,10 +72,6 @@ module Import
@url ||= params[:bitbucket_server_url]
end
- def authorized?
- can?(current_user, :create_projects, target_namespace)
- end
-
def allow_local_requests?
Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
diff --git a/app/services/import/fogbugz_service.rb b/app/services/import/fogbugz_service.rb
new file mode 100644
index 00000000000..d1003823456
--- /dev/null
+++ b/app/services/import/fogbugz_service.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+module Import
+ class FogbugzService < Import::BaseService
+ attr_reader :client, :params, :current_user
+
+ def execute(credentials)
+ url = credentials[:uri]
+
+ if blocked_url?(url)
+ return log_and_return_error("Invalid URL: #{url}", _("Invalid URL: %{url}") % { url: url }, :bad_request)
+ end
+
+ unless authorized?
+ return log_and_return_error(
+ "You don't have permissions to create this project",
+ _("You don't have permissions to create this project"),
+ :unauthorized
+ )
+ end
+
+ unless repo
+ return log_and_return_error(
+ "Project #{repo_id} could not be found",
+ s_("Fogbugz|Project %{repo} could not be found") % { repo: repo_id },
+ :unprocessable_entity)
+ end
+
+ project = create_project(credentials)
+
+ if project.persisted?
+ success(project)
+ elsif project.errors[:import_source_disabled].present?
+ error(project.errors[:import_source_disabled], :forbidden)
+ else
+ error(project_save_error(project), :unprocessable_entity)
+ end
+ rescue StandardError => e
+ log_and_return_error(
+ "Fogbugz import failed due to an error: #{e}",
+ s_("Fogbugz|Fogbugz import failed due to an error: %{error}" % { error: e }),
+ :bad_request)
+ end
+
+ private
+
+ def create_project(credentials)
+ Gitlab::FogbugzImport::ProjectCreator.new(
+ repo,
+ project_name,
+ target_namespace,
+ current_user,
+ credentials,
+ umap
+ ).execute
+ end
+
+ def repo_id
+ @repo_id ||= params[:repo_id]
+ end
+
+ def repo
+ @repo ||= client.repo(repo_id)
+ end
+
+ def project_name
+ @project_name ||= params[:new_name].presence || repo.name
+ end
+
+ def namespace_path
+ @namespace_path ||= params[:target_namespace].presence || current_user.namespace_path
+ end
+
+ def target_namespace
+ @target_namespace ||= find_or_create_namespace(namespace_path, current_user.namespace_path)
+ end
+
+ def umap
+ @umap ||= params[:umap]
+ end
+
+ def allow_local_requests?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
+ end
+
+ def blocked_url?(url)
+ Gitlab::UrlBlocker.blocked_url?(
+ url,
+ allow_localhost: allow_local_requests?,
+ allow_local_network: allow_local_requests?,
+ schemes: %w(http https)
+ )
+ end
+
+ def log_and_return_error(message, translated_message, error_type)
+ log_error(message)
+ error(translated_message, error_type)
+ end
+
+ def log_error(message)
+ Gitlab::Import::Logger.error(
+ message: 'Import failed due to a Fogbugz error',
+ error: message
+ )
+ end
+ end
+end
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index 033f6bcb043..ff5d5d2c4c1 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -89,10 +89,6 @@ module Import
end
end
- def authorized?
- can?(current_user, :create_projects, target_namespace)
- end
-
def url
@url ||= params[:github_hostname]
end
diff --git a/app/services/incident_management/timeline_events/base_service.rb b/app/services/incident_management/timeline_events/base_service.rb
index cae58465e4a..7168e2fdd38 100644
--- a/app/services/incident_management/timeline_events/base_service.rb
+++ b/app/services/incident_management/timeline_events/base_service.rb
@@ -3,6 +3,8 @@
module IncidentManagement
module TimelineEvents
class BaseService
+ include Gitlab::Utils::UsageData
+
def allowed?
user&.can?(:admin_incident_management_timeline_event, incident)
end
diff --git a/app/services/incident_management/timeline_events/create_service.rb b/app/services/incident_management/timeline_events/create_service.rb
index 7d287e1bd82..5e5feed65c2 100644
--- a/app/services/incident_management/timeline_events/create_service.rb
+++ b/app/services/incident_management/timeline_events/create_service.rb
@@ -3,6 +3,7 @@
module IncidentManagement
module TimelineEvents
DEFAULT_ACTION = 'comment'
+ DEFAULT_EDITABLE = false
class CreateService < TimelineEvents::BaseService
def initialize(incident, user, params)
@@ -23,7 +24,8 @@ module IncidentManagement
action: params.fetch(:action, DEFAULT_ACTION),
note_html: params[:note_html].presence || params[:note],
occurred_at: params[:occurred_at],
- promoted_from_note: params[:promoted_from_note]
+ promoted_from_note: params[:promoted_from_note],
+ editable: params.fetch(:editable, DEFAULT_EDITABLE)
}
timeline_event = IncidentManagement::TimelineEvent.new(timeline_event_params)
@@ -31,6 +33,7 @@ module IncidentManagement
if timeline_event.save
add_system_note(timeline_event)
+ track_usage_event(:incident_management_timeline_event_created, user.id)
success(timeline_event)
else
error_in_save(timeline_event)
diff --git a/app/services/incident_management/timeline_events/destroy_service.rb b/app/services/incident_management/timeline_events/destroy_service.rb
index 8bb186c289a..90e95ae8869 100644
--- a/app/services/incident_management/timeline_events/destroy_service.rb
+++ b/app/services/incident_management/timeline_events/destroy_service.rb
@@ -18,6 +18,7 @@ module IncidentManagement
if timeline_event.destroy
add_system_note(incident, user)
+ track_usage_event(:incident_management_timeline_event_deleted, user.id)
success(timeline_event)
else
error_in_save(timeline_event)
diff --git a/app/services/incident_management/timeline_events/update_service.rb b/app/services/incident_management/timeline_events/update_service.rb
index fe8b4879561..83497b123dd 100644
--- a/app/services/incident_management/timeline_events/update_service.rb
+++ b/app/services/incident_management/timeline_events/update_service.rb
@@ -17,11 +17,13 @@ module IncidentManagement
end
def execute
+ return error_non_editable unless timeline_event.editable?
return error_no_permissions unless allowed?
if timeline_event.update(update_params)
add_system_note(timeline_event)
+ track_usage_event(:incident_management_timeline_event_edited, user.id)
success(timeline_event)
else
error_in_save(timeline_event)
@@ -56,6 +58,10 @@ module IncidentManagement
:none
end
+
+ def error_non_editable
+ error(_('You cannot edit this timeline event.'))
+ end
end
end
end
diff --git a/app/services/issuable/clone/base_service.rb b/app/services/issuable/clone/base_service.rb
index 1d2c5c06d1b..ce9918a4b56 100644
--- a/app/services/issuable/clone/base_service.rb
+++ b/app/services/issuable/clone/base_service.rb
@@ -41,14 +41,15 @@ module Issuable
end
def update_new_entity_description
- rewritten_description = MarkdownContentRewriterService.new(
+ update_description_params = MarkdownContentRewriterService.new(
current_user,
- original_entity.description,
+ original_entity,
+ :description,
original_entity.project,
new_parent
).execute
- new_entity.update!(description: rewritten_description)
+ new_entity.update!(update_description_params)
end
def update_new_entity_attributes
diff --git a/app/services/issuable/common_system_notes_service.rb b/app/services/issuable/common_system_notes_service.rb
index 9ee54c7ba0f..5cf32ee3e40 100644
--- a/app/services/issuable/common_system_notes_service.rb
+++ b/app/services/issuable/common_system_notes_service.rb
@@ -54,7 +54,7 @@ module Issuable
def create_draft_note(old_title)
return unless issuable.is_a?(MergeRequest)
- if MergeRequest.work_in_progress?(old_title) != issuable.work_in_progress?
+ if MergeRequest.draft?(old_title) != issuable.draft?
SystemNoteService.handle_merge_request_draft(issuable, issuable.project, current_user)
end
end
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index 7ab663718db..edf6d75b632 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -56,6 +56,7 @@ module Issues
handle_add_related_issue(issue)
resolve_discussions_with_issue(issue)
create_escalation_status(issue)
+ try_to_associate_contact(issue)
super
end
@@ -99,6 +100,13 @@ module Issues
IssueLinks::CreateService.new(issue, issue.author, { target_issuable: @add_related_issue }).execute
end
+
+ def try_to_associate_contact(issue)
+ return unless issue.external_author
+ return unless current_user.can?(:set_issue_crm_contacts, issue)
+
+ set_crm_contacts(issue, [issue.external_author])
+ end
end
end
diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb
index e210e6a2362..d210ba2a76c 100644
--- a/app/services/issues/move_service.rb
+++ b/app/services/issues/move_service.rb
@@ -110,7 +110,6 @@ module Issues
end
def copy_contacts
- return unless Feature.enabled?(:customer_relations, original_entity.project.root_ancestor)
return unless original_entity.project.root_ancestor == new_entity.project.root_ancestor
new_entity.customer_relations_contacts = original_entity.customer_relations_contacts
diff --git a/app/services/jira_connect_subscriptions/create_service.rb b/app/services/jira_connect_subscriptions/create_service.rb
index 2f31a3c8d4e..d5ab3800dcf 100644
--- a/app/services/jira_connect_subscriptions/create_service.rb
+++ b/app/services/jira_connect_subscriptions/create_service.rb
@@ -5,13 +5,18 @@ module JiraConnectSubscriptions
include Gitlab::Utils::StrongMemoize
MERGE_REQUEST_SYNC_BATCH_SIZE = 20
MERGE_REQUEST_SYNC_BATCH_DELAY = 1.minute.freeze
- NOT_SITE_ADMIN = 'The Jira user is not a site administrator.'
def execute
- return error(NOT_SITE_ADMIN, 403) unless can_administer_jira?
+ if !params[:jira_user]
+ return error(s_('JiraConnect|Could not fetch user information from Jira. ' \
+ 'Check the permissions in Jira and try again.'), 403)
+ elsif !can_administer_jira?
+ return error(s_('JiraConnect|The Jira user is not a site administrator. ' \
+ 'Check the permissions in Jira and try again.'), 403)
+ end
unless namespace && can?(current_user, :create_jira_connect_subscription, namespace)
- return error('Invalid namespace. Please make sure you have sufficient permissions', 401)
+ return error(s_('JiraConnect|Cannot find namespace. Make sure you have sufficient permissions.'), 401)
end
create_subscription
@@ -20,7 +25,7 @@ module JiraConnectSubscriptions
private
def can_administer_jira?
- @params[:jira_user]&.site_admin?
+ params[:jira_user]&.site_admin?
end
def create_subscription
diff --git a/app/services/markdown_content_rewriter_service.rb b/app/services/markdown_content_rewriter_service.rb
index bc6fd592eaa..4d8f523fa77 100644
--- a/app/services/markdown_content_rewriter_service.rb
+++ b/app/services/markdown_content_rewriter_service.rb
@@ -4,29 +4,69 @@
# which rewrite references to GitLab objects and uploads within the content
# based on their visibility by the `target_parent`.
class MarkdownContentRewriterService
- REWRITERS = [Gitlab::Gfm::ReferenceRewriter, Gitlab::Gfm::UploadsRewriter].freeze
+ include Gitlab::Utils::StrongMemoize
- def initialize(current_user, content, source_parent, target_parent)
- # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39654#note_399095117
- raise ArgumentError, 'The rewriter classes require that `source_parent` is a `Project`' \
- unless source_parent.is_a?(Project)
+ REWRITERS = [Gitlab::Gfm::ReferenceRewriter, Gitlab::Gfm::UploadsRewriter].freeze
+ def initialize(current_user, object, field, source_parent, target_parent)
@current_user = current_user
- @content = content.presence
@source_parent = source_parent
@target_parent = target_parent
+ @object = object
+ @field = field
+
+ validate_parameters!
+
+ @content = object[field].dup.presence
+ @html_field = object.cached_markdown_fields.html_field(field)
+ @content_html = object.cached_html_for(field)
+
+ @rewriters =
+ REWRITERS.map do |rewriter_class|
+ rewriter_class.new(@content, content_html, source_parent, current_user)
+ end
+
+ @result = {
+ field => nil,
+ html_field => nil
+ }.with_indifferent_access
end
def execute
- return unless content
+ return result unless content
- REWRITERS.inject(content) do |text, klass|
- rewriter = klass.new(text, source_parent, current_user)
- rewriter.rewrite(target_parent)
+ unless safe_to_copy_markdown?
+ rewriters.each do |rewriter|
+ rewriter.rewrite(target_parent)
+ end
+ end
+
+ result[field] = content
+ result[html_field] = content_html if safe_to_copy_markdown?
+ result[:skip_markdown_cache_validation] = safe_to_copy_markdown?
+
+ result
+ end
+
+ def safe_to_copy_markdown?
+ strong_memoize(:safe_to_copy_markdown) do
+ rewriters.none?(&:needs_rewrite?)
end
end
private
- attr_reader :current_user, :content, :source_parent, :target_parent
+ def validate_parameters!
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39654#note_399095117
+ raise ArgumentError, 'The rewriter classes require that `source_parent` is a `Project`' \
+ unless source_parent.is_a?(Project)
+
+ if object.cached_markdown_fields[field].nil?
+ raise ArgumentError, 'The `field` attribute does not contain cached markdown'
+ end
+ end
+
+ attr_reader :current_user, :content, :source_parent,
+ :target_parent, :rewriters, :content_html,
+ :field, :html_field, :object, :result
end
diff --git a/app/services/members/approve_access_request_service.rb b/app/services/members/approve_access_request_service.rb
index 919c22894c1..5337279f702 100644
--- a/app/services/members/approve_access_request_service.rb
+++ b/app/services/members/approve_access_request_service.rb
@@ -3,7 +3,7 @@
module Members
class ApproveAccessRequestService < Members::BaseService
def execute(access_requester, skip_authorization: false, skip_log_audit_event: false)
- raise Gitlab::Access::AccessDeniedError unless skip_authorization || can_update_access_requester?(access_requester)
+ validate_access!(access_requester) unless skip_authorization
access_requester.access_level = params[:access_level] if params[:access_level]
access_requester.accept_request
@@ -15,9 +15,24 @@ module Members
private
+ def validate_access!(access_requester)
+ raise Gitlab::Access::AccessDeniedError unless can_update_access_requester?(access_requester)
+
+ if approving_member_with_owner_access_level?(access_requester) &&
+ cannot_assign_owner_responsibilities_to_member_in_project?(access_requester)
+ raise Gitlab::Access::AccessDeniedError
+ end
+ end
+
def can_update_access_requester?(access_requester)
can?(current_user, update_member_permission(access_requester), access_requester)
end
+
+ def approving_member_with_owner_access_level?(access_requester)
+ access_level_value = params[:access_level] || access_requester.access_level
+
+ access_level_value == Gitlab::Access::OWNER
+ end
end
end
diff --git a/app/services/members/base_service.rb b/app/services/members/base_service.rb
index 3f55f661d9b..62b8fc5d6f7 100644
--- a/app/services/members/base_service.rb
+++ b/app/services/members/base_service.rb
@@ -60,5 +60,18 @@ module Members
TodosDestroyer::EntityLeaveWorker.perform_in(Todo::WAIT_FOR_DELETE, member.user_id, member.source_id, type)
end
end
+
+ def cannot_assign_owner_responsibilities_to_member_in_project?(member)
+ # The purpose of this check is -
+ # We can have direct members who are "Owners" in a project going forward and
+ # we do not want Maintainers of the project updating/adding/removing other "Owners"
+ # within the project.
+ # Only OWNERs in a project should be able to manage any action around OWNERship in that project.
+ member.is_a?(ProjectMember) &&
+ !can?(current_user, :manage_owners, member.source)
+ end
+
+ alias_method :cannot_revoke_owner_responsibilities_from_member_in_project?,
+ :cannot_assign_owner_responsibilities_to_member_in_project?
end
end
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index 8485e7cbafa..57d9da4cefd 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -22,6 +22,11 @@ module Members
def execute
raise Gitlab::Access::AccessDeniedError unless can?(current_user, create_member_permission(source), source)
+ # rubocop:disable Layout/EmptyLineAfterGuardClause
+ raise Gitlab::Access::AccessDeniedError if adding_at_least_one_owner &&
+ cannot_assign_owner_responsibilities_to_member_in_project?
+ # rubocop:enable Layout/EmptyLineAfterGuardClause
+
validate_invite_source!
validate_invitable!
@@ -45,6 +50,14 @@ module Members
attr_reader :source, :errors, :invites, :member_created_namespace_id, :members,
:tasks_to_be_done_members, :member_created_member_task_id
+ def adding_at_least_one_owner
+ params[:access_level] == Gitlab::Access::OWNER
+ end
+
+ def cannot_assign_owner_responsibilities_to_member_in_project?
+ source.is_a?(Project) && !current_user.can?(:manage_owners, source)
+ end
+
def invites_from_params
# String, Nil, Array, Integer
return params[:user_id] if params[:user_id].is_a?(Array)
diff --git a/app/services/members/creator_service.rb b/app/services/members/creator_service.rb
index 81986a2883f..276093a00a9 100644
--- a/app/services/members/creator_service.rb
+++ b/app/services/members/creator_service.rb
@@ -12,6 +12,105 @@ module Members
def access_levels
Gitlab::Access.sym_options_with_owner
end
+
+ def add_users( # rubocop:disable Metrics/ParameterLists
+ source,
+ users,
+ access_level,
+ current_user: nil,
+ expires_at: nil,
+ tasks_to_be_done: [],
+ tasks_project_id: nil,
+ ldap: nil,
+ blocking_refresh: nil
+ )
+ return [] unless users.present?
+
+ # If this user is attempting to manage Owner members and doesn't have permission, do not allow
+ return [] if managing_owners?(current_user, access_level) && cannot_manage_owners?(source, current_user)
+
+ emails, users, existing_members = parse_users_list(source, users)
+
+ Member.transaction do
+ (emails + users).map! do |user|
+ new(source,
+ user,
+ access_level,
+ existing_members: existing_members,
+ current_user: current_user,
+ expires_at: expires_at,
+ tasks_to_be_done: tasks_to_be_done,
+ tasks_project_id: tasks_project_id,
+ ldap: ldap,
+ blocking_refresh: blocking_refresh)
+ .execute
+ end
+ end
+ end
+
+ def add_user( # rubocop:disable Metrics/ParameterLists
+ source,
+ user,
+ access_level,
+ current_user: nil,
+ expires_at: nil,
+ ldap: nil,
+ blocking_refresh: nil
+ )
+ add_users(source,
+ [user],
+ access_level,
+ current_user: current_user,
+ expires_at: expires_at,
+ ldap: ldap,
+ blocking_refresh: blocking_refresh).first
+ end
+
+ private
+
+ def managing_owners?(current_user, access_level)
+ current_user && Gitlab::Access.sym_options_with_owner[access_level] == Gitlab::Access::OWNER
+ end
+
+ def parse_users_list(source, list)
+ emails = []
+ user_ids = []
+ users = []
+ existing_members = {}
+
+ list.each do |item|
+ case item
+ when User
+ users << item
+ when Integer
+ user_ids << item
+ when /\A\d+\Z/
+ user_ids << item.to_i
+ when Devise.email_regexp
+ emails << item
+ end
+ end
+
+ # the below will automatically discard invalid user_ids
+ users.concat(User.id_in(user_ids)) if user_ids.present?
+ # de-duplicate just in case as there is no controlling if user records and ids are sent multiple times
+ users.uniq!
+
+ users_by_emails = source.users_by_emails(emails) # preloads our request store for all emails
+ # in case emails belong to a user that is being invited by user or user_id, remove them from
+ # emails and let users/user_ids handle it.
+ parsed_emails = emails.select do |email|
+ user = users_by_emails[email]
+ !user || (users.exclude?(user) && user_ids.exclude?(user.id))
+ end
+
+ if users.present? || users_by_emails.present?
+ # helps not have to perform another query per user id to see if the member exists later on when fetching
+ existing_members = source.members_and_requesters.with_user(users + users_by_emails.values).index_by(&:user_id)
+ end
+
+ [parsed_emails, users, existing_members]
+ end
end
def initialize(source, user, access_level, **args)
@@ -21,10 +120,12 @@ module Members
@args = args
end
+ private_class_method :new
+
def execute
find_or_build_member
commit_member
- create_member_task
+ after_commit_tasks
member
end
@@ -92,6 +193,10 @@ module Members
end
end
+ def after_commit_tasks
+ create_member_task
+ end
+
def create_member_task
return unless member.persisted?
return if member_task_attributes.value?(nil)
@@ -163,15 +268,19 @@ module Members
end
def find_or_initialize_member_by_user
- # have to use members and requesters here since project/group limits on requested_at being nil for members and
- # wouldn't be found in `source.members` if it already existed
- # this of course will not treat active invites the same since we aren't searching on email
- source.members_and_requesters.find_or_initialize_by(user_id: user.id) # rubocop:disable CodeReuse/ActiveRecord
+ # We have to use `members_and_requesters` here since the given `members` is modified in the models
+ # to act more like a scope(removing the requested_at members) and therefore ActiveRecord has issues with that
+ # on build and refreshing that relation.
+ existing_members[user.id] || source.members_and_requesters.build(user_id: user.id) # rubocop:disable CodeReuse/ActiveRecord
end
def ldap
args[:ldap] || false
end
+
+ def existing_members
+ args[:existing_members] || {}
+ end
end
end
diff --git a/app/services/members/destroy_service.rb b/app/services/members/destroy_service.rb
index bb2d419c046..0a8344c58db 100644
--- a/app/services/members/destroy_service.rb
+++ b/app/services/members/destroy_service.rb
@@ -3,7 +3,12 @@
module Members
class DestroyService < Members::BaseService
def execute(member, skip_authorization: false, skip_subresources: false, unassign_issuables: false, destroy_bot: false)
- raise Gitlab::Access::AccessDeniedError unless skip_authorization || authorized?(member, destroy_bot)
+ unless skip_authorization
+ raise Gitlab::Access::AccessDeniedError unless authorized?(member, destroy_bot)
+
+ raise Gitlab::Access::AccessDeniedError if destroying_member_with_owner_access_level?(member) &&
+ cannot_revoke_owner_responsibilities_from_member_in_project?(member)
+ end
@skip_auth = skip_authorization
@@ -90,6 +95,10 @@ module Members
can?(current_user, destroy_bot_member_permission(member), member)
end
+ def destroying_member_with_owner_access_level?(member)
+ member.owner?
+ end
+
def destroy_member_permission(member)
case member
when GroupMember
diff --git a/app/services/members/groups/bulk_creator_service.rb b/app/services/members/groups/bulk_creator_service.rb
deleted file mode 100644
index 57cec241584..00000000000
--- a/app/services/members/groups/bulk_creator_service.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-module Members
- module Groups
- class BulkCreatorService < Members::Groups::CreatorService
- include Members::BulkCreateUsers
- end
- end
-end
diff --git a/app/services/members/groups/creator_service.rb b/app/services/members/groups/creator_service.rb
index a6f0daa99aa..dd3d44e4d96 100644
--- a/app/services/members/groups/creator_service.rb
+++ b/app/services/members/groups/creator_service.rb
@@ -3,6 +3,12 @@
module Members
module Groups
class CreatorService < Members::CreatorService
+ class << self
+ def cannot_manage_owners?(source, current_user)
+ source.max_member_access_for_user(current_user) < Gitlab::Access::OWNER
+ end
+ end
+
private
def can_create_new_member?
diff --git a/app/services/members/mailgun/process_webhook_service.rb b/app/services/members/mailgun/process_webhook_service.rb
deleted file mode 100644
index e359a83ad42..00000000000
--- a/app/services/members/mailgun/process_webhook_service.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-module Members
- module Mailgun
- class ProcessWebhookService
- ProcessWebhookServiceError = Class.new(StandardError)
-
- def initialize(payload)
- @payload = payload
- end
-
- def execute
- @member = Member.find_by_invite_token(invite_token)
- update_member_and_log if member
- rescue ProcessWebhookServiceError => e
- Gitlab::ErrorTracking.track_exception(e)
- end
-
- private
-
- attr_reader :payload, :member
-
- def update_member_and_log
- log_update_event if member.update(invite_email_success: false)
- end
-
- def log_update_event
- Gitlab::AppLogger.info "UPDATED MEMBER INVITE_EMAIL_SUCCESS: member_id: #{member.id}"
- end
-
- def invite_token
- # may want to validate schema in some way using ::JSONSchemer.schema(SCHEMA_PATH).valid?(message) if this
- # gets more complex
- payload.dig('user-variables', ::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY) ||
- raise(ProcessWebhookServiceError, "Failed to receive #{::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY} in user-variables: #{payload}")
- end
- end
- end
-end
diff --git a/app/services/members/projects/bulk_creator_service.rb b/app/services/members/projects/bulk_creator_service.rb
deleted file mode 100644
index 68e71e35d12..00000000000
--- a/app/services/members/projects/bulk_creator_service.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-module Members
- module Projects
- class BulkCreatorService < Members::Projects::CreatorService
- include Members::BulkCreateUsers
- end
- end
-end
diff --git a/app/services/members/projects/creator_service.rb b/app/services/members/projects/creator_service.rb
index 9e9389d3c18..cde1d0462e8 100644
--- a/app/services/members/projects/creator_service.rb
+++ b/app/services/members/projects/creator_service.rb
@@ -3,9 +3,18 @@
module Members
module Projects
class CreatorService < Members::CreatorService
+ class << self
+ def cannot_manage_owners?(source, current_user)
+ !Ability.allowed?(current_user, :manage_owners, source)
+ end
+ end
+
private
def can_create_new_member?
+ return false if assigning_project_member_with_owner_access_level? &&
+ cannot_assign_owner_responsibilities_to_member_in_project?
+
# This access check(`admin_project_member`) will write to safe request store cache for the user being added.
# This means any operations inside the same request will need to purge that safe request
# store cache if operations are needed to be done inside the same request that checks max member access again on
@@ -14,6 +23,11 @@ module Members
end
def can_update_existing_member?
+ # rubocop:disable Layout/EmptyLineAfterGuardClause
+ raise ::Gitlab::Access::AccessDeniedError if assigning_project_member_with_owner_access_level? &&
+ cannot_assign_owner_responsibilities_to_member_in_project?
+ # rubocop:enable Layout/EmptyLineAfterGuardClause
+
current_user.can?(:update_project_member, member)
end
@@ -21,6 +35,16 @@ module Members
# this condition is reached during testing setup a lot due to use of `.add_user`
member.project.personal_namespace_holder?(member.user)
end
+
+ def assigning_project_member_with_owner_access_level?
+ return true if member && member.owner?
+
+ access_level == Gitlab::Access::OWNER
+ end
+
+ def cannot_assign_owner_responsibilities_to_member_in_project?
+ member.is_a?(ProjectMember) && !current_user.can?(:manage_owners, member.source)
+ end
end
end
end
diff --git a/app/services/members/update_service.rb b/app/services/members/update_service.rb
index 257698f65ae..b4d1b80e5a3 100644
--- a/app/services/members/update_service.rb
+++ b/app/services/members/update_service.rb
@@ -5,6 +5,7 @@ module Members
# returns the updated member
def execute(member, permission: :update)
raise Gitlab::Access::AccessDeniedError unless can?(current_user, action_member_permission(permission, member), member)
+ raise Gitlab::Access::AccessDeniedError if prevent_upgrade_to_owner?(member) || prevent_downgrade_from_owner?(member)
old_access_level = member.human_access
old_expiry = member.expires_at
@@ -28,6 +29,22 @@ module Members
def downgrading_to_guest?
params[:access_level] == Gitlab::Access::GUEST
end
+
+ def upgrading_to_owner?
+ params[:access_level] == Gitlab::Access::OWNER
+ end
+
+ def downgrading_from_owner?(member)
+ member.owner?
+ end
+
+ def prevent_upgrade_to_owner?(member)
+ upgrading_to_owner? && cannot_assign_owner_responsibilities_to_member_in_project?(member)
+ end
+
+ def prevent_downgrade_from_owner?(member)
+ downgrading_from_owner?(member) && cannot_revoke_owner_responsibilities_from_member_in_project?(member)
+ end
end
end
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index 44be254441d..2b6a66b9dee 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -121,16 +121,21 @@ module MergeRequests
override :handle_quick_actions
def handle_quick_actions(merge_request)
super
- handle_wip_event(merge_request)
+ handle_draft_event(merge_request)
end
- def handle_wip_event(merge_request)
- if wip_event = params.delete(:wip_event)
+ def handle_draft_event(merge_request)
+ if draft_event = params.delete(:wip_event)
# We update the title that is provided in the params or we use the mr title
title = params[:title] || merge_request.title
- params[:title] = case wip_event
- when 'wip' then MergeRequest.wip_title(title)
- when 'unwip' then MergeRequest.wipless_title(title)
+ # Supports both the `wip` and `draft` variants of draft_event.
+ # This support can be removed in %15.2 or later.
+ #
+ params[:title] = case draft_event
+ when 'wip' then MergeRequest.draft_title(title)
+ when 'draft' then MergeRequest.draft_title(title)
+ when 'unwip' then MergeRequest.draftless_title(title)
+ when 'ready' then MergeRequest.draftless_title(title)
end
end
end
@@ -185,9 +190,12 @@ module MergeRequests
def create_pipeline_for(merge_request, user, async: false)
if async
+ # TODO: pass push_options to worker
MergeRequests::CreatePipelineWorker.perform_async(project.id, user.id, merge_request.id)
else
- MergeRequests::CreatePipelineService.new(project: project, current_user: user).execute(merge_request)
+ MergeRequests::CreatePipelineService
+ .new(project: project, current_user: user, params: params.slice(:push_options))
+ .execute(merge_request)
end
end
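handle_draft_event keeps accepting the legacy wip/unwip values alongside the newer draft/ready pair and maps both onto the draft title helpers. A standalone sketch of the mapping, with a plain prefix helper standing in for the MergeRequest class methods:

    # Illustrative only: mimics the draft_event -> title mapping above.
    def draft_title(title)
      title.start_with?('Draft: ') ? title : "Draft: #{title}"
    end

    def draftless_title(title)
      title.sub(/\ADraft: /, '')
    end

    def apply_draft_event(event, title)
      case event
      when 'wip', 'draft'   then draft_title(title)
      when 'unwip', 'ready' then draftless_title(title)
      else title
      end
    end

    apply_draft_event('draft', 'Add search index')        # => "Draft: Add search index"
    apply_draft_event('ready', 'Draft: Add search index') # => "Add search index"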
diff --git a/app/services/merge_requests/build_service.rb b/app/services/merge_requests/build_service.rb
index ee6f204be45..cc786ac02bd 100644
--- a/app/services/merge_requests/build_service.rb
+++ b/app/services/merge_requests/build_service.rb
@@ -46,7 +46,7 @@ module MergeRequests
:source_branch_ref,
:source_project,
:compare_commits,
- :wip_title,
+ :draft_title,
:description,
:first_multiline_commit,
:errors,
@@ -206,7 +206,7 @@ module MergeRequests
def set_draft_title_if_needed
return unless compare_commits.empty? || Gitlab::Utils.to_boolean(params[:draft])
- merge_request.title = wip_title
+ merge_request.title = draft_title
end
# When your branch name starts with an iid followed by a dash this pattern will be
@@ -223,6 +223,7 @@ module MergeRequests
# more than one commit in the MR
#
def assign_title_and_description
+ assign_description_from_repository_template
assign_title_and_description_from_commits
merge_request.title ||= title_from_issue if target_project.issues_enabled? || target_project.external_issue_tracker
merge_request.title ||= source_branch.titleize.humanize
@@ -286,6 +287,37 @@ module MergeRequests
title_parts.join(' ')
end
+ def assign_description_from_repository_template
+ return unless merge_request.description.blank?
+
+ # Use TemplateFinder to load the default template. We need this mainly for
+ # the project_id, in case it differs from the target project. Conveniently,
+ # since the underlying merge_request_template_names_hash is cached, this
+ # should also be relatively cheap and allows us to bail early if the project
+ # does not have a default template.
+ templates = TemplateFinder.all_template_names(target_project, :merge_requests)
+ template = templates.values.flatten.find { |tmpl| tmpl[:name].casecmp?('default') }
+
+ return unless template
+
+ begin
+ repository_template = TemplateFinder.build(
+ :merge_requests,
+ target_project,
+ {
+ name: template[:name],
+ source_template_project_id: template[:project_id]
+ }
+ ).execute
+ rescue ::Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError
+ return
+ end
+
+ return unless repository_template.present?
+
+ merge_request.description = repository_template.content
+ end
+
def issue_iid
strong_memoize(:issue_iid) do
@params_issue_iid || begin
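assign_description_from_repository_template only proceeds when a template named "default" exists (matched case-insensitively) in the hash returned by TemplateFinder.all_template_names, and it rescues the finder's missing-file error before assigning the content. A small sketch of just the lookup step, over an illustrative hash of that shape:

    # Illustrative data shape; the real hash comes from TemplateFinder.all_template_names.
    templates = {
      'Project Templates' => [
        { name: 'Default', project_id: 42 },
        { name: 'bugfix',  project_id: 42 }
      ]
    }

    default_template = templates.values.flatten.find { |tmpl| tmpl[:name].casecmp?('default') }
    # => { name: "Default", project_id: 42 } - nil would make the service bail out early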
diff --git a/app/services/merge_requests/create_pipeline_service.rb b/app/services/merge_requests/create_pipeline_service.rb
index 9d7f8393ba5..37c734613e7 100644
--- a/app/services/merge_requests/create_pipeline_service.rb
+++ b/app/services/merge_requests/create_pipeline_service.rb
@@ -9,9 +9,11 @@ module MergeRequests
end
def create_detached_merge_request_pipeline(merge_request)
- Ci::CreatePipelineService.new(pipeline_project(merge_request),
- current_user,
- ref: pipeline_ref_for_detached_merge_request_pipeline(merge_request))
+ Ci::CreatePipelineService
+ .new(pipeline_project(merge_request),
+ current_user,
+ ref: pipeline_ref_for_detached_merge_request_pipeline(merge_request),
+ push_options: params[:push_options])
.execute(:merge_request_event, merge_request: merge_request)
end
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index 5e7eee4f1c3..f51923b7035 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -117,6 +117,8 @@ module MergeRequests
if delete_source_branch?
MergeRequests::DeleteSourceBranchWorker.perform_async(@merge_request.id, @merge_request.source_branch_sha, branch_deletion_user.id)
end
+
+ merge_request_merge_param
end
def clean_merge_jid
@@ -135,6 +137,12 @@ module MergeRequests
@merge_request.can_remove_source_branch?(branch_deletion_user)
end
+ def merge_request_merge_param
+ if @merge_request.can_remove_source_branch?(branch_deletion_user) && !params.fetch('should_remove_source_branch', nil).nil?
+ @merge_request.update(merge_params: @merge_request.merge_params.merge('should_remove_source_branch' => params['should_remove_source_branch']))
+ end
+ end
+
def handle_merge_error(log_message:, save_message_on_model: false)
Gitlab::AppLogger.error("MergeService ERROR: #{merge_request_info} - #{log_message}")
@merge_request.update(merge_error: log_message) if save_message_on_model
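merge_request_merge_param persists an explicitly supplied should_remove_source_branch flag into the merge request's merge_params hash, but only when the source branch is removable and the param was actually present. A plain-hash sketch of the update, with the permission check left out:

    # Illustrative only: shows the merge_params update performed above.
    merge_params = { 'force_remove_source_branch' => '0' }
    params       = { 'should_remove_source_branch' => true }

    unless params.fetch('should_remove_source_branch', nil).nil?
      merge_params = merge_params.merge('should_remove_source_branch' => params['should_remove_source_branch'])
    end

    merge_params # => {"force_remove_source_branch"=>"0", "should_remove_source_branch"=>true}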
diff --git a/app/services/merge_requests/mergeability/run_checks_service.rb b/app/services/merge_requests/mergeability/run_checks_service.rb
index fd6907c976b..1d4b96b3090 100644
--- a/app/services/merge_requests/mergeability/run_checks_service.rb
+++ b/app/services/merge_requests/mergeability/run_checks_service.rb
@@ -4,23 +4,13 @@ module MergeRequests
class RunChecksService
include Gitlab::Utils::StrongMemoize
- # We want to have the cheapest checks first in the list,
- # that way we can fail fast before running the more expensive ones
- CHECKS = [
- CheckOpenStatusService,
- CheckDraftStatusService,
- CheckBrokenStatusService,
- CheckDiscussionsStatusService,
- CheckCiStatusService
- ].freeze
-
def initialize(merge_request:, params:)
@merge_request = merge_request
@params = params
end
def execute
- CHECKS.each_with_object([]) do |check_class, results|
+ merge_request.mergeability_checks.each_with_object([]) do |check_class, results|
check = check_class.new(merge_request: merge_request, params: params)
next if check.skip?
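execute now takes its check classes from merge_request.mergeability_checks instead of the removed CHECKS constant, so the model owns the list and its cheapest-first ordering. A generic sketch of the same iterate-and-skip pattern with stand-in check classes:

    # Stand-ins for mergeability check classes; only the interface matters here.
    class OpenCheck
      def initialize(merge_request:, params:); end
      def skip?; false; end
      def execute; :success; end
    end

    class CiCheck
      def initialize(merge_request:, params:); end
      def skip?; true; end # e.g. skipped when params ask to ignore CI status
      def execute; :failed; end
    end

    checks = [OpenCheck, CiCheck] # cheapest first, as the removed comment recommended

    results = checks.each_with_object([]) do |check_class, acc|
      check = check_class.new(merge_request: nil, params: {})
      next if check.skip?

      acc << check.execute
    end

    results # => [:success]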
diff --git a/app/services/merge_requests/post_merge_service.rb b/app/services/merge_requests/post_merge_service.rb
index 980c757bcbc..286c082ac8a 100644
--- a/app/services/merge_requests/post_merge_service.rb
+++ b/app/services/merge_requests/post_merge_service.rb
@@ -48,7 +48,7 @@ module MergeRequests
# We are intentionally only closing Issues asynchronously (excluding ExternalIssues)
# as the worker only supports finding an Issue. We are also only experiencing
# SQL timeouts when closing an Issue.
- if Feature.enabled?(:async_mr_close_issue, project) && issue.is_a?(Issue)
+ if issue.is_a?(Issue)
MergeRequests::CloseIssueWorker.perform_async(
project.id,
current_user.id,
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index f7a0f90b95f..5205d34baae 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -255,17 +255,17 @@ module MergeRequests
commit_shas = merge_request.commit_shas
- wip_commit = @commits.detect do |commit|
- commit.work_in_progress? && commit_shas.include?(commit.sha)
+ draft_commit = @commits.detect do |commit|
+ commit.draft? && commit_shas.include?(commit.sha)
end
- if wip_commit && !merge_request.work_in_progress?
- merge_request.update(title: merge_request.wip_title)
+ if draft_commit && !merge_request.draft?
+ merge_request.update(title: merge_request.draft_title)
SystemNoteService.add_merge_request_draft_from_commit(
merge_request,
merge_request.project,
@current_user,
- wip_commit
+ draft_commit
)
end
end
diff --git a/app/services/merge_requests/reload_merge_head_diff_service.rb b/app/services/merge_requests/reload_merge_head_diff_service.rb
index 4724dd1c068..f6506779aba 100644
--- a/app/services/merge_requests/reload_merge_head_diff_service.rb
+++ b/app/services/merge_requests/reload_merge_head_diff_service.rb
@@ -43,3 +43,5 @@ module MergeRequests
end
end
end
+
+MergeRequests::ReloadMergeHeadDiffService.prepend_mod
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index 6e8afaecbba..603da4ef535 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -180,16 +180,16 @@ module MergeRequests
return unless changed_fields.include?("title")
old_title, new_title = merge_request.previous_changes["title"]
- old_title_wip = MergeRequest.work_in_progress?(old_title)
- new_title_wip = MergeRequest.work_in_progress?(new_title)
+ old_title_draft = MergeRequest.draft?(old_title)
+ new_title_draft = MergeRequest.draft?(new_title)
- if !old_title_wip && new_title_wip
- # Marked as Draft/WIP
+ if !old_title_draft && new_title_draft
+ # Marked as Draft
#
merge_request_activity_counter
.track_marked_as_draft_action(user: current_user)
- elsif old_title_wip && !new_title_wip
- # Unmarked as Draft/WIP
+ elsif old_title_draft && !new_title_draft
+ # Unmarked as Draft
#
notify_draft_status_changed(merge_request)
diff --git a/app/services/metrics/dashboard/base_service.rb b/app/services/metrics/dashboard/base_service.rb
index 5be8ae62548..5975fa28b0b 100644
--- a/app/services/metrics/dashboard/base_service.rb
+++ b/app/services/metrics/dashboard/base_service.rb
@@ -14,7 +14,6 @@ module Metrics
STAGES::VariableEndpointInserter,
STAGES::PanelIdsInserter,
STAGES::TrackPanelType,
- STAGES::AlertsInserter,
STAGES::UrlValidator
].freeze
diff --git a/app/services/metrics/dashboard/panel_preview_service.rb b/app/services/metrics/dashboard/panel_preview_service.rb
index 02dd908e229..260b49a5b19 100644
--- a/app/services/metrics/dashboard/panel_preview_service.rb
+++ b/app/services/metrics/dashboard/panel_preview_service.rb
@@ -10,7 +10,6 @@ module Metrics
::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
::Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
::Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
::Gitlab::Metrics::Dashboard::Stages::UrlValidator
].freeze
diff --git a/app/services/metrics/dashboard/system_dashboard_service.rb b/app/services/metrics/dashboard/system_dashboard_service.rb
index 29b8f23f40d..1bd31b2ba21 100644
--- a/app/services/metrics/dashboard/system_dashboard_service.rb
+++ b/app/services/metrics/dashboard/system_dashboard_service.rb
@@ -17,8 +17,7 @@ module Metrics
STAGES::CustomMetricsDetailsInserter,
STAGES::MetricEndpointInserter,
STAGES::VariableEndpointInserter,
- STAGES::PanelIdsInserter,
- STAGES::AlertsInserter
+ STAGES::PanelIdsInserter
].freeze
class << self
diff --git a/app/services/note_summary.rb b/app/services/note_summary.rb
index 6fe14939aaa..0f2555b9ff0 100644
--- a/app/services/note_summary.rb
+++ b/app/services/note_summary.rb
@@ -4,9 +4,9 @@ class NoteSummary
attr_reader :note
attr_reader :metadata
- def initialize(noteable, project, author, body, action: nil, commit_count: nil)
+ def initialize(noteable, project, author, body, action: nil, commit_count: nil, created_at: nil)
@note = { noteable: noteable,
- created_at: noteable.system_note_timestamp,
+ created_at: created_at || noteable.system_note_timestamp,
project: project, author: author, note: body }
@metadata = { action: action, commit_count: commit_count }.compact
diff --git a/app/services/notes/copy_service.rb b/app/services/notes/copy_service.rb
index 6e5b4596602..e7182350837 100644
--- a/app/services/notes/copy_service.rb
+++ b/app/services/notes/copy_service.rb
@@ -38,17 +38,16 @@ module Notes
def params_from_note(note, new_note)
new_discussion_ids[note.discussion_id] ||= Discussion.discussion_id(new_note)
- rewritten_note = MarkdownContentRewriterService.new(current_user, note.note, from_project, to_noteable.resource_parent).execute
- new_params = {
+ new_params = sanitized_note_params(note)
+
+ new_params.merge!(
project: to_noteable.project,
noteable: to_noteable,
discussion_id: new_discussion_ids[note.discussion_id],
- note: rewritten_note,
- note_html: nil,
created_at: note.created_at,
updated_at: note.updated_at
- }
+ )
if note.system_note_metadata
new_params[:system_note_metadata] = note.system_note_metadata.dup
@@ -61,6 +60,14 @@ module Notes
new_params
end
+ # Skip copying cached markdown HTML if text
+ # does not contain references or uploads.
+ def sanitized_note_params(note)
+ MarkdownContentRewriterService
+ .new(current_user, note, :note, from_project, to_noteable.resource_parent)
+ .execute
+ end
+
def copy_award_emoji(from_note, to_note)
AwardEmojis::CopyService.new(from_note, to_note).execute
end
diff --git a/app/services/notification_recipients/build_service.rb b/app/services/notification_recipients/build_service.rb
index aeb859af4d9..e63e19e365c 100644
--- a/app/services/notification_recipients/build_service.rb
+++ b/app/services/notification_recipients/build_service.rb
@@ -29,10 +29,6 @@ module NotificationRecipients
::NotificationRecipients::Builder::ProjectMaintainers.new(target, **args).notification_recipients
end
- def self.build_new_release_recipients(*args)
- ::NotificationRecipients::Builder::NewRelease.new(*args).notification_recipients
- end
-
def self.build_new_review_recipients(*args)
::NotificationRecipients::Builder::NewReview.new(*args).notification_recipients
end
diff --git a/app/services/notification_recipients/builder/new_release.rb b/app/services/notification_recipients/builder/new_release.rb
deleted file mode 100644
index 67676b6eec8..00000000000
--- a/app/services/notification_recipients/builder/new_release.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-module NotificationRecipients
- module Builder
- class NewRelease < Base
- attr_reader :target
-
- def initialize(target)
- @target = target
- end
-
- def build!
- add_recipients(target.project.authorized_users, :custom, nil)
- end
-
- def custom_action
- :new_release
- end
-
- def acting_user
- target.author
- end
- end
- end
-end
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 32b23d4978f..2477fcd02e5 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -447,7 +447,9 @@ class NotificationService
return false
end
- recipients = NotificationRecipients::BuildService.build_new_release_recipients(release)
+ recipients = NotificationRecipients::BuildService.build_recipients(release,
+ release.author,
+ action: "new")
recipients.each do |recipient|
mailer.new_release_email(recipient.user.id, release, recipient.reason).deliver_later
diff --git a/app/services/packages/cleanup/update_policy_service.rb b/app/services/packages/cleanup/update_policy_service.rb
new file mode 100644
index 00000000000..6744accc007
--- /dev/null
+++ b/app/services/packages/cleanup/update_policy_service.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Packages
+ module Cleanup
+ class UpdatePolicyService < BaseProjectService
+ ALLOWED_ATTRIBUTES = %i[keep_n_duplicated_package_files].freeze
+
+ def execute
+ return ServiceResponse.error(message: 'Access denied') unless allowed?
+
+ if policy.update(policy_params)
+ ServiceResponse.success(payload: { packages_cleanup_policy: policy })
+ else
+ ServiceResponse.error(message: policy.errors.full_messages.to_sentence || 'Bad request')
+ end
+ end
+
+ private
+
+ def policy
+ strong_memoize(:policy) do
+ project.packages_cleanup_policy
+ end
+ end
+
+ def allowed?
+ can?(current_user, :admin_package, project)
+ end
+
+ def policy_params
+ params.slice(*ALLOWED_ATTRIBUTES)
+ end
+ end
+ end
+end
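The new UpdatePolicyService follows the ServiceResponse pattern used elsewhere in app/services: check the permission, update, and wrap the outcome. Only attributes listed in ALLOWED_ATTRIBUTES can reach policy.update; a standalone sketch of that whitelisting step (the extra param keys are made up):

    ALLOWED_ATTRIBUTES = %i[keep_n_duplicated_package_files].freeze

    params = { keep_n_duplicated_package_files: '10', project_id: 7, admin: true }

    policy_params = params.slice(*ALLOWED_ATTRIBUTES)
    # => { keep_n_duplicated_package_files: "10" } - unrelated keys never reach the update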
diff --git a/app/services/packages/go/create_package_service.rb b/app/services/packages/go/create_package_service.rb
index 2a6eeff402e..0e5771a2e73 100644
--- a/app/services/packages/go/create_package_service.rb
+++ b/app/services/packages/go/create_package_service.rb
@@ -38,11 +38,12 @@ module Packages
raise GoZipSizeError, "#{version.mod.name}@#{version.name}.#{type} exceeds size limit" if file.size > project.actual_limits.golang_max_file_size
digests = {
- md5: Digest::MD5.hexdigest(content),
sha1: Digest::SHA1.hexdigest(content),
sha256: Digest::SHA256.hexdigest(content)
}
+ digests[:md5] = Digest::MD5.hexdigest(content) unless Gitlab::FIPS.enabled?
+
[file, digests]
end
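This hunk, together with the Maven and RubyGems changes below, skips MD5 digests when the instance runs in FIPS mode, since MD5 is not a FIPS-approved algorithm. A standalone sketch of the conditional digest hash, with a plain boolean standing in for Gitlab::FIPS.enabled?:

    require 'digest'

    fips_enabled = false # stand-in for Gitlab::FIPS.enabled?
    content = 'module example'

    digests = {
      sha1:   Digest::SHA1.hexdigest(content),
      sha256: Digest::SHA256.hexdigest(content)
    }
    digests[:md5] = Digest::MD5.hexdigest(content) unless fips_enabled

    digests.keys # => [:sha1, :sha256, :md5]; :md5 is omitted when FIPS mode is on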
diff --git a/app/services/packages/maven/metadata/append_package_file_service.rb b/app/services/packages/maven/metadata/append_package_file_service.rb
index e991576ebc6..c778620ea8e 100644
--- a/app/services/packages/maven/metadata/append_package_file_service.rb
+++ b/app/services/packages/maven/metadata/append_package_file_service.rb
@@ -36,7 +36,7 @@ module Packages
sha256: file_sha256
)
- append_metadata_file(content: file_md5, file_name: MD5_FILE_NAME)
+ append_metadata_file(content: file_md5, file_name: MD5_FILE_NAME) unless Gitlab::FIPS.enabled?
append_metadata_file(content: file_sha1, file_name: SHA1_FILE_NAME)
append_metadata_file(content: file_sha256, file_name: SHA256_FILE_NAME)
append_metadata_file(content: file_sha512, file_name: SHA512_FILE_NAME)
@@ -70,6 +70,8 @@ module Packages
end
def digest_from(content, type)
+ return if type == :md5 && Gitlab::FIPS.enabled?
+
digest_class = case type
when :md5
Digest::MD5
diff --git a/app/services/packages/rubygems/create_gemspec_service.rb b/app/services/packages/rubygems/create_gemspec_service.rb
index 22533264480..52642aeb0a0 100644
--- a/app/services/packages/rubygems/create_gemspec_service.rb
+++ b/app/services/packages/rubygems/create_gemspec_service.rb
@@ -24,12 +24,14 @@ module Packages
file.write(content)
file.flush
+ md5 = Gitlab::FIPS.enabled? ? nil : Digest::MD5.hexdigest(content)
+
package.package_files.create!(
file: file,
size: file.size,
file_name: "#{gemspec.name}.gemspec",
file_sha1: Digest::SHA1.hexdigest(content),
- file_md5: Digest::MD5.hexdigest(content),
+ file_md5: md5,
file_sha256: Digest::SHA256.hexdigest(content)
)
ensure
diff --git a/app/services/pages/delete_service.rb b/app/services/pages/delete_service.rb
index 8d33e6c1000..95e99daeb6c 100644
--- a/app/services/pages/delete_service.rb
+++ b/app/services/pages/delete_service.rb
@@ -11,7 +11,20 @@ module Pages
# > The default strategy is :nullify which sets the foreign keys to NULL.
PagesDomain.for_project(project).delete_all
+ publish_deleted_event
+
DestroyPagesDeploymentsWorker.perform_async(project.id)
end
+
+ private
+
+ def publish_deleted_event
+ event = Pages::PageDeletedEvent.new(data: {
+ project_id: project.id,
+ namespace_id: project.namespace_id
+ })
+
+ Gitlab::EventStore.publish(event)
+ end
end
end
diff --git a/app/services/pages_domains/create_acme_order_service.rb b/app/services/pages_domains/create_acme_order_service.rb
index c600f497fa5..e289a78091b 100644
--- a/app/services/pages_domains/create_acme_order_service.rb
+++ b/app/services/pages_domains/create_acme_order_service.rb
@@ -2,6 +2,9 @@
module PagesDomains
class CreateAcmeOrderService
+ # Elliptic curve used to generate the private key
+ ECDSA_CURVE = "prime256v1"
+
attr_reader :pages_domain
def initialize(pages_domain)
@@ -14,7 +17,12 @@ module PagesDomains
challenge = order.new_challenge
- private_key = OpenSSL::PKey::RSA.new(4096)
+ private_key = if Feature.enabled?(:pages_lets_encrypt_ecdsa, pages_domain.project)
+ OpenSSL::PKey::EC.generate(ECDSA_CURVE)
+ else
+ OpenSSL::PKey::RSA.new(4096)
+ end
+
saved_order = pages_domain.acme_orders.create!(
url: order.url,
expires_at: order.expires,
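When the pages_lets_encrypt_ecdsa feature flag is enabled, the ACME order's private key is now an ECDSA key on the prime256v1 curve instead of a 4096-bit RSA key. A minimal sketch of the two key-generation calls, with the feature flag replaced by a local boolean:

    require 'openssl'

    ECDSA_CURVE = 'prime256v1'
    use_ecdsa = true # stand-in for Feature.enabled?(:pages_lets_encrypt_ecdsa, project)

    private_key = if use_ecdsa
                    OpenSSL::PKey::EC.generate(ECDSA_CURVE)
                  else
                    OpenSSL::PKey::RSA.new(4096)
                  end

    private_key.class # => OpenSSL::PKey::EC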
diff --git a/app/services/projects/after_rename_service.rb b/app/services/projects/after_rename_service.rb
index a3d54bc6b58..2ed4346e5ca 100644
--- a/app/services/projects/after_rename_service.rb
+++ b/app/services/projects/after_rename_service.rb
@@ -95,20 +95,6 @@ module Projects
.new
.rename_project(path_before, project_path, namespace_full_path)
end
-
- if project.pages_deployed?
- # Block will be evaluated in the context of project so we need
- # to bind to a local variable to capture it, as the instance
- # variable and method aren't available on Project
- path_before_local = @path_before
-
- project.run_after_commit_or_now do
- Gitlab::PagesTransfer
- .new
- .async
- .rename_project(path_before_local, path, namespace.full_path)
- end
- end
end
def log_completion
diff --git a/app/services/projects/destroy_rollback_service.rb b/app/services/projects/destroy_rollback_service.rb
deleted file mode 100644
index 7f0ca63a406..00000000000
--- a/app/services/projects/destroy_rollback_service.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- class DestroyRollbackService < BaseService
- include Gitlab::ShellAdapter
-
- def execute
- return unless project
-
- Projects::ForksCountService.new(project).delete_cache
-
- unless rollback_repository(project.repository)
- raise_error(s_('DeleteProject|Failed to restore project repository. Please contact the administrator.'))
- end
-
- unless rollback_repository(project.wiki.repository)
- raise_error(s_('DeleteProject|Failed to restore wiki repository. Please contact the administrator.'))
- end
- end
-
- private
-
- def rollback_repository(repository)
- return true unless repository
-
- result = Repositories::DestroyRollbackService.new(repository).execute
-
- result[:status] == :success
- end
- end
-end
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index a73244c6971..bc5be5bdff3 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -10,11 +10,6 @@ module Projects
def async_execute
project.update_attribute(:pending_delete, true)
- # Ensure no repository +deleted paths are kept,
- # regardless of any issue with the ProjectDestroyWorker
- # job process.
- schedule_stale_repos_removal
-
job_id = ProjectDestroyWorker.perform_async(project.id, current_user.id, params)
log_info("User #{current_user.id} scheduled destruction of project #{project.full_path} with job ID #{job_id}")
end
@@ -109,16 +104,6 @@ module Projects
result[:status] == :success
end
- def schedule_stale_repos_removal
- repos = [project.repository, project.wiki.repository]
-
- repos.each do |repository|
- next unless repository
-
- Repositories::ShellDestroyService.new(repository).execute(Repositories::ShellDestroyService::STALE_REMOVAL_DELAY)
- end
- end
-
def attempt_rollback(project, message)
return unless project
@@ -191,6 +176,10 @@ module Projects
# rubocop: enable CodeReuse/ActiveRecord
def destroy_ci_records!
+ # Make sure to destroy this first just in case the project is undergoing stats refresh.
+ # This is to avoid logging the artifact deletion in Ci::JobArtifacts::DestroyBatchService.
+ project.build_artifacts_size_refresh&.destroy
+
project.all_pipelines.find_each(batch_size: BATCH_SIZE) do |pipeline| # rubocop: disable CodeReuse/ActiveRecord
# Destroy artifacts, then builds, then pipelines
# All builds have already been dropped by Ci::AbortPipelinesService,
diff --git a/app/services/projects/import_export/export_service.rb b/app/services/projects/import_export/export_service.rb
index 72492b6f5a5..d8d35422590 100644
--- a/app/services/projects/import_export/export_service.rb
+++ b/app/services/projects/import_export/export_service.rb
@@ -143,7 +143,12 @@ module Projects
project_id: project.id
)
- notification_service.project_not_exported(project, current_user, shared.errors)
+ user = current_user
+ errors = shared.errors
+
+ project.run_after_commit_or_now do |project|
+ NotificationService.new.project_not_exported(project, user, errors)
+ end
end
end
end
diff --git a/app/services/projects/open_issues_count_service.rb b/app/services/projects/open_issues_count_service.rb
index ee4d559e612..925512f31d7 100644
--- a/app/services/projects/open_issues_count_service.rb
+++ b/app/services/projects/open_issues_count_service.rb
@@ -63,12 +63,12 @@ module Projects
# rubocop: disable CodeReuse/ActiveRecord
def self.query(projects, public_only: true)
- issues_filtered_by_type = Issue.opened.with_issue_type(Issue::TYPES_FOR_LIST)
+ open_issues = Issue.opened
if public_only
- issues_filtered_by_type.public_only.where(project: projects)
+ open_issues.public_only.where(project: projects)
else
- issues_filtered_by_type.where(project: projects)
+ open_issues.where(project: projects)
end
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index b66435d013b..d01e96a1a2d 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -112,7 +112,7 @@ module Projects
integration = project.find_or_initialize_integration(::Integrations::Prometheus.to_param)
integration.assign_attributes(attrs)
- attrs = integration.to_integration_hash.except('created_at', 'updated_at')
+ attrs = integration.to_database_hash
{ prometheus_integration_attributes: attrs }
end
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index 2ad5c303be2..666227951c6 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -120,7 +120,6 @@ module Projects
# Overridden in EE
def post_update_hooks(project)
- move_pages(project)
ensure_personal_project_owner_membership(project)
end
@@ -232,13 +231,6 @@ module Projects
)
end
- def move_pages(project)
- return unless project.pages_deployed?
-
- transfer = Gitlab::PagesTransfer.new.async
- transfer.move_project(project.path, @old_namespace.full_path, @new_namespace.full_path)
- end
-
def old_wiki_repo_path
"#{old_path}#{::Gitlab::GlRepository::WIKI.path_suffix}"
end
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index c6ea364320f..8ded2516b97 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -3,6 +3,7 @@
module Projects
class UpdatePagesService < BaseService
InvalidStateError = Class.new(StandardError)
+ WrongUploadedDeploymentSizeError = Class.new(StandardError)
BLOCK_SIZE = 32.kilobytes
PUBLIC_DIR = 'public'
@@ -39,6 +40,9 @@ module Projects
end
rescue InvalidStateError => e
error(e.message)
+ rescue WrongUploadedDeploymentSizeError => e
+ error("Uploading artifacts to pages storage failed")
+ raise e
rescue StandardError => e
error(e.message)
raise e
@@ -80,6 +84,10 @@ module Projects
ci_build_id: build.id
)
+ if deployment.size != file.size || deployment.file.size != file.size
+ raise(WrongUploadedDeploymentSizeError)
+ end
+
validate_outdated_sha!
project.update_pages_deployment!(deployment)
diff --git a/app/services/releases/base_service.rb b/app/services/releases/base_service.rb
index 249333e6d13..7fb59dad508 100644
--- a/app/services/releases/base_service.rb
+++ b/app/services/releases/base_service.rb
@@ -19,6 +19,10 @@ module Releases
params[:tag]
end
+ def tag_message
+ params[:tag_message]
+ end
+
def ref
params[:ref]
end
diff --git a/app/services/releases/create_service.rb b/app/services/releases/create_service.rb
index caa6a003205..e3134070231 100644
--- a/app/services/releases/create_service.rb
+++ b/app/services/releases/create_service.rb
@@ -34,7 +34,7 @@ module Releases
result = Tags::CreateService
.new(project, current_user)
- .execute(tag_name, ref, nil)
+ .execute(tag_name, ref, tag_message)
return result unless result[:status] == :success
diff --git a/app/services/repositories/base_service.rb b/app/services/repositories/base_service.rb
index 13ad126f8f0..4d7e4ffe267 100644
--- a/app/services/repositories/base_service.rb
+++ b/app/services/repositories/base_service.rb
@@ -3,8 +3,6 @@
class Repositories::BaseService < BaseService
include Gitlab::ShellAdapter
- DELETED_FLAG = '+deleted'
-
attr_reader :repository
delegate :container, :disk_path, :full_path, to: :repository
@@ -21,16 +19,6 @@ class Repositories::BaseService < BaseService
gitlab_shell.mv_repository(repository.shard, from_path, to_path)
end
- # Build a path for removing repositories
- # We use `+` because its not allowed by GitLab so user can not create
- # project with name cookies+119+deleted and capture someone stalled repository
- #
- # gitlab/cookies.git -> gitlab/cookies+119+deleted.git
- #
- def removal_path
- "#{disk_path}+#{container.id}#{DELETED_FLAG}"
- end
-
# If we get a Gitaly error, the repository may be corrupted. We can
# ignore these errors since we're going to trash the repositories
# anyway.
diff --git a/app/services/repositories/changelog_service.rb b/app/services/repositories/changelog_service.rb
index eafd9d7a55e..7a78b323453 100644
--- a/app/services/repositories/changelog_service.rb
+++ b/app/services/repositories/changelog_service.rb
@@ -6,6 +6,19 @@ module Repositories
DEFAULT_TRAILER = 'Changelog'
DEFAULT_FILE = 'CHANGELOG.md'
+ # The maximum number of commits allowed to be fetched in the `from`..`to` range.
+ #
+ # This value is arbitrarily chosen. Increasing it means more Gitaly calls
+ # and more pressure on Gitaly services.
+ #
+ # This number is about 3x the average number of commits per GitLab release.
+ # Some examples for GitLab's own releases:
+ #
+ # * 13.6.0: 4636 commits
+ # * 13.5.0: 5912 commits
+ # * 13.4.0: 5541 commits
+ COMMITS_LIMIT = 15_000
+
# The `project` specifies the `Project` to generate the changelog section
# for.
#
@@ -75,6 +88,8 @@ module Repositories
commits =
ChangelogCommitsFinder.new(project: @project, from: from, to: @to)
+ verify_commit_range!(from, @to)
+
commits.each_page(@trailer) do |page|
mrs = mrs_finder.execute(page)
@@ -82,6 +97,9 @@ module Repositories
# batch of commits, instead of needing a query for every commit.
page.each(&:lazy_author)
+ # Preload author permissions
+ @project.team.max_member_access_for_user_ids(page.map(&:author).compact.map(&:id))
+
page.each do |commit|
release.add_entry(
title: commit.title,
@@ -117,5 +135,19 @@ module Repositories
'could be found to use instead'
)
end
+
+ def verify_commit_range!(from, to)
+ return unless Feature.enabled?(:changelog_commits_limitation, @project)
+
+ commits = @project.repository.commits_by(oids: [from, to])
+
+ raise Gitlab::Changelog::Error, "Invalid or not found commit value in the given range" unless commits.count == 2
+
+ _, commits_count = @project.repository.diverging_commit_count(from, to)
+
+ if commits_count > COMMITS_LIMIT
+ raise Gitlab::Changelog::Error, "The commits range exceeds #{COMMITS_LIMIT} elements."
+ end
+ end
end
end
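verify_commit_range! (active behind the changelog_commits_limitation feature flag) first checks that both range endpoints resolve to commits, then compares the diverging-commit count against COMMITS_LIMIT. The counting itself needs the repository, so the sketch below only models the guard with pre-computed counts; its signature is illustrative, not the real one:

    COMMITS_LIMIT = 15_000

    def verify_commit_range!(found_commits_count, diverging_commits_count)
      raise 'Invalid or not found commit value in the given range' unless found_commits_count == 2

      if diverging_commits_count > COMMITS_LIMIT
        raise "The commits range exceeds #{COMMITS_LIMIT} elements."
      end
    end

    verify_commit_range!(2, 4_636) # passes: both commits exist and the range is within the limit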
diff --git a/app/services/repositories/destroy_rollback_service.rb b/app/services/repositories/destroy_rollback_service.rb
deleted file mode 100644
index a19e305607f..00000000000
--- a/app/services/repositories/destroy_rollback_service.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-class Repositories::DestroyRollbackService < Repositories::BaseService
- def execute
- # There is a possibility project does not have repository or wiki
- return success unless repo_exists?(removal_path)
-
- # Flush the cache for both repositories.
- ignore_git_errors { repository.before_delete }
-
- if mv_repository(removal_path, disk_path)
- log_info(%Q{Repository "#{removal_path}" moved to "#{disk_path}" for repository "#{full_path}"})
-
- success
- elsif repo_exists?(removal_path)
- # If the repo does not exist, there is no need to return an
- # error because there was nothing to do.
- move_error(removal_path)
- else
- success
- end
- rescue Gitlab::Git::Repository::NoRepository
- success
- end
-end
diff --git a/app/services/repositories/destroy_service.rb b/app/services/repositories/destroy_service.rb
index c5a0af56066..a87b8d09244 100644
--- a/app/services/repositories/destroy_service.rb
+++ b/app/services/repositories/destroy_service.rb
@@ -10,31 +10,25 @@ class Repositories::DestroyService < Repositories::BaseService
# Git data (e.g. a list of branch names).
ignore_git_errors { repository.before_delete }
- if mv_repository(disk_path, removal_path)
- log_info(%Q{Repository "#{disk_path}" moved to "#{removal_path}" for repository "#{full_path}"})
+ # Use variables that aren't methods on Project, because they are used in a callback
+ current_storage = repository.shard
+ current_path = "#{disk_path}.git"
- current_repository = repository
-
- # Because GitlabShellWorker is inside a run_after_commit callback it will
- # never be triggered on a read-only instance.
- #
- # Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/223272
- if Gitlab::Database.read_only?
- Repositories::ShellDestroyService.new(current_repository).execute
- else
- container.run_after_commit do
- Repositories::ShellDestroyService.new(current_repository).execute
- end
+ # Because #remove happens inside a run_after_commit callback it will
+ # never be triggered on a read-only instance.
+ #
+ # Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/223272
+ if Gitlab::Database.read_only?
+ Gitlab::Git::Repository.new(current_storage, current_path, nil, nil).remove
+ else
+ container.run_after_commit do
+ Gitlab::Git::Repository.new(current_storage, current_path, nil, nil).remove
end
+ end
- log_info("Repository \"#{full_path}\" was removed")
+ log_info("Repository \"#{full_path}\" was removed")
- success
- elsif repo_exists?(disk_path)
- move_error(disk_path)
- else
- success
- end
+ success
rescue Gitlab::Git::Repository::NoRepository
success
end
diff --git a/app/services/repositories/shell_destroy_service.rb b/app/services/repositories/shell_destroy_service.rb
deleted file mode 100644
index d25cb28c6d7..00000000000
--- a/app/services/repositories/shell_destroy_service.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-class Repositories::ShellDestroyService < Repositories::BaseService
- REPO_REMOVAL_DELAY = 5.minutes.to_i
- STALE_REMOVAL_DELAY = REPO_REMOVAL_DELAY * 2
-
- def execute(delay = REPO_REMOVAL_DELAY)
- return success unless repository
-
- GitlabShellWorker.perform_in(delay,
- :remove_repository,
- repository.shard,
- removal_path)
- end
-end
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index f7ffe288d57..316e6367aa7 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -12,13 +12,15 @@ module ResourceAccessTokens
def execute
return error("User does not have permission to create #{resource_type} access token") unless has_permission_to_create?
+ access_level = params[:access_level] || Gitlab::Access::MAINTAINER
+ return error("Could not provision owner access to project access token") if do_not_allow_owner_access_level_for_project_bot?(access_level)
+
user = create_user
return error(user.errors.full_messages.to_sentence) unless user.persisted?
user.update!(external: true) if current_user.external?
- access_level = params[:access_level] || Gitlab::Access::MAINTAINER
member = create_membership(resource, user, access_level)
unless member.persisted?
@@ -120,6 +122,12 @@ module ResourceAccessTokens
def success(access_token)
ServiceResponse.success(payload: { access_token: access_token })
end
+
+ def do_not_allow_owner_access_level_for_project_bot?(access_level)
+ resource.is_a?(Project) &&
+ access_level == Gitlab::Access::OWNER &&
+ !current_user.can?(:manage_owners, resource)
+ end
end
end
diff --git a/app/services/resource_events/base_change_timebox_service.rb b/app/services/resource_events/base_change_timebox_service.rb
index d802bbee107..372f1c9d816 100644
--- a/app/services/resource_events/base_change_timebox_service.rb
+++ b/app/services/resource_events/base_change_timebox_service.rb
@@ -22,7 +22,7 @@ module ResourceEvents
end
def build_resource_args
- key = resource.class.name.foreign_key
+ key = resource.class.base_class.name.foreign_key
{
user_id: user.id,
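Switching to resource.class.base_class.name.foreign_key keeps the event key stable for STI subclasses: assuming WorkItem is an STI subclass of Issue, a WorkItem resource should still produce issue_id rather than work_item_id. A small sketch using ActiveSupport's String#foreign_key:

    require 'active_support/core_ext/string/inflections'

    'WorkItem'.foreign_key # => "work_item_id"
    'Issue'.foreign_key    # => "issue_id"

    # With ActiveRecord STI, WorkItem.base_class is Issue, so
    # resource.class.base_class.name.foreign_key yields "issue_id" for both.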
diff --git a/app/services/resource_events/base_synthetic_notes_builder_service.rb b/app/services/resource_events/base_synthetic_notes_builder_service.rb
index 192d40129a3..36de70dc291 100644
--- a/app/services/resource_events/base_synthetic_notes_builder_service.rb
+++ b/app/services/resource_events/base_synthetic_notes_builder_service.rb
@@ -25,8 +25,7 @@ module ResourceEvents
def apply_common_filters(events)
events = apply_pagination(events)
- events = apply_last_fetched_at(events)
- apply_fetch_until(events)
+ apply_last_fetched_at(events)
end
def apply_pagination(events)
@@ -44,12 +43,6 @@ module ResourceEvents
events.created_after(last_fetched_at)
end
- def apply_fetch_until(events)
- return events unless params[:fetch_until].present?
-
- events.created_on_or_before(params[:fetch_until])
- end
-
def resource_parent
strong_memoize(:resource_parent) do
resource.project || resource.group
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index 343fc00a2f0..b850592f7ba 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -10,8 +10,9 @@ module ServicePing
SubmissionError = Class.new(StandardError)
- def initialize(skip_db_write: false)
+ def initialize(skip_db_write: false, payload: nil)
@skip_db_write = skip_db_write
+ @payload = payload
end
def execute
@@ -19,7 +20,7 @@ module ServicePing
start = Time.current
begin
- usage_data = ServicePing::BuildPayload.new.execute
+ usage_data = payload || ServicePing::BuildPayload.new.execute
response = submit_usage_data_payload(usage_data)
rescue StandardError => e
return unless Gitlab::CurrentSettings.usage_ping_enabled?
@@ -34,7 +35,7 @@ module ServicePing
}
submit_payload({ error: error_payload }, path: ERROR_PATH)
- usage_data = Gitlab::Usage::ServicePingReport.for(output: :all_metrics_values)
+ usage_data = payload || Gitlab::Usage::ServicePingReport.for(output: :all_metrics_values)
response = submit_usage_data_payload(usage_data)
end
@@ -45,7 +46,7 @@ module ServicePing
raise SubmissionError, "Invalid usage_data_id in response: #{version_usage_data_id}"
end
- unless @skip_db_write
+ unless skip_db_write
raw_usage_data = save_raw_usage_data(usage_data)
raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
ServicePing::DevopsReport.new(response).execute
@@ -58,6 +59,8 @@ module ServicePing
private
+ attr_reader :payload, :skip_db_write
+
def metrics_collection_time(payload, parents = [])
return [] unless payload.is_a?(Hash)
diff --git a/app/services/service_response.rb b/app/services/service_response.rb
index 6bc394d2ae2..c7ab75a4426 100644
--- a/app/services/service_response.rb
+++ b/app/services/service_response.rb
@@ -18,6 +18,24 @@ class ServiceResponse
self.http_status = http_status
end
+ def track_exception(as: StandardError, **extra_data)
+ if error?
+ e = as.new(message)
+ Gitlab::ErrorTracking.track_exception(e, extra_data)
+ end
+
+ self
+ end
+
+ def track_and_raise_exception(as: StandardError, **extra_data)
+ if error?
+ e = as.new(message)
+ Gitlab::ErrorTracking.track_and_raise_exception(e, extra_data)
+ end
+
+ self
+ end
+
def [](key)
to_h[key]
end
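track_exception and track_and_raise_exception wrap the response message in the given exception class, report it only when the response is an error, and return self so they chain onto a service call. A hedged usage sketch, meant to be read in the context of the application (the extra-data key is made up):

    result = ServiceResponse.error(message: 'policy update failed')

    result
      .track_exception(as: StandardError, policy_id: 123) # reported via Gitlab::ErrorTracking because result is an error
      .error? # => true; the response itself is passed through unchanged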
diff --git a/app/services/snippets/bulk_destroy_service.rb b/app/services/snippets/bulk_destroy_service.rb
index 430e8330b59..6eab9fb320e 100644
--- a/app/services/snippets/bulk_destroy_service.rb
+++ b/app/services/snippets/bulk_destroy_service.rb
@@ -25,11 +25,9 @@ module Snippets
rescue SnippetAccessError
service_response_error("You don't have access to delete these snippets.", 403)
rescue DeleteRepositoryError
- attempt_rollback_repositories
service_response_error('Failed to delete snippet repositories.', 400)
rescue StandardError
# In case the delete operation fails
- attempt_rollback_repositories
service_response_error('Failed to remove snippets.', 400)
end
@@ -55,18 +53,6 @@ module Snippets
end
end
- def attempt_rollback_repositories
- snippets.each do |snippet|
- result = Repositories::DestroyRollbackService.new(snippet.repository).execute
-
- log_rollback_error(snippet) if result[:status] == :error
- end
- end
-
- def log_rollback_error(snippet)
- Gitlab::AppLogger.error("Repository #{snippet.full_path} in path #{snippet.disk_path} could not be rolled back")
- end
-
def service_response_error(message, http_status)
ServiceResponse.error(message: message, http_status: http_status)
end
diff --git a/app/services/snippets/destroy_service.rb b/app/services/snippets/destroy_service.rb
index 96157434462..6c62d7d5e45 100644
--- a/app/services/snippets/destroy_service.rb
+++ b/app/services/snippets/destroy_service.rb
@@ -31,7 +31,6 @@ module Snippets
rescue DestroyError
service_response_error('Failed to remove snippet repository.', 400)
rescue StandardError
- attempt_rollback_repository
service_response_error('Failed to remove snippet.', 400)
end
@@ -45,10 +44,6 @@ module Snippets
snippet.destroy!
end
- def attempt_rollback_repository
- Repositories::DestroyRollbackService.new(snippet.repository).execute
- end
-
def user_can_delete_snippet?
can?(current_user, :admin_snippet, snippet)
end
diff --git a/app/services/static_site_editor/config_service.rb b/app/services/static_site_editor/config_service.rb
deleted file mode 100644
index c8e7165e076..00000000000
--- a/app/services/static_site_editor/config_service.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-module StaticSiteEditor
- class ConfigService < ::BaseContainerService
- ValidationError = Class.new(StandardError)
-
- def initialize(container:, current_user: nil, params: {})
- super
-
- @project = container
- @repository = project.repository
- @ref = params.fetch(:ref)
- end
-
- def execute
- check_access!
-
- file_config = load_file_config!
- file_data = file_config.to_hash_with_defaults
- generated_data = load_generated_config.data
-
- check_for_duplicate_keys!(generated_data, file_data)
- data = merged_data(generated_data, file_data)
-
- ServiceResponse.success(payload: data)
- rescue ValidationError => e
- ServiceResponse.error(message: e.message)
- rescue StandardError => e
- Gitlab::ErrorTracking.track_and_raise_exception(e)
- end
-
- private
-
- attr_reader :project, :repository, :ref
-
- def static_site_editor_config_file
- '.gitlab/static-site-editor.yml'
- end
-
- def check_access!
- unless can?(current_user, :download_code, project)
- raise ValidationError, 'Insufficient permissions to read configuration'
- end
- end
-
- def load_file_config!
- yaml = yaml_from_repo.presence || '{}'
- file_config = Gitlab::StaticSiteEditor::Config::FileConfig.new(yaml)
-
- unless file_config.valid?
- raise ValidationError, file_config.errors.first
- end
-
- file_config
- rescue Gitlab::StaticSiteEditor::Config::FileConfig::ConfigError => e
- raise ValidationError, e.message
- end
-
- def load_generated_config
- Gitlab::StaticSiteEditor::Config::GeneratedConfig.new(
- repository,
- ref,
- params.fetch(:path),
- params[:return_url]
- )
- end
-
- def check_for_duplicate_keys!(generated_data, file_data)
- duplicate_keys = generated_data.keys & file_data.keys
- raise ValidationError, "Duplicate key(s) '#{duplicate_keys}' found." if duplicate_keys.present?
- end
-
- def merged_data(generated_data, file_data)
- generated_data.merge(file_data)
- end
-
- def yaml_from_repo
- repository.blob_data_at(ref, static_site_editor_config_file)
- rescue GRPC::NotFound
- # Return nil in the case of a GRPC::NotFound exception, so the default config will be used.
- # Allow any other unexpected exception will be tracked and re-raised.
- nil
- end
- end
-end
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index 89212288a6b..f9e5c3725d8 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -2,6 +2,18 @@
module SystemNotes
class IssuablesService < ::SystemNotes::BaseService
+ # We create cross-referenced system notes when a commit relates to an issue.
+ # There are two options for which time to use for the system note:
+ # 1. The push date (default)
+ # 2. The commit date
+ #
+ # The commit date is useful when an existing Git repository is imported to GitLab.
+ # It helps to preserve the original order of all notes (comments, commits, status changes, etc.)
+ # in the imported issues. Otherwise, all commits will be linked before or after all other imported notes.
+ #
+ # See also the discussion in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60700#note_612724683
+ USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE = false
+
#
# noteable_ref - Referenced noteable object
#
@@ -216,7 +228,8 @@ module SystemNotes
)
else
track_cross_reference_action
- create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference'))
+ created_at = mentioner.created_at if USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE && mentioner.is_a?(Commit)
+ create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference', created_at: created_at))
end
end
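Combined with the NoteSummary change earlier in this diff, the timestamp of a cross-reference note is picked as follows: use the commit's own date only when USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE is flipped to true and the mentioner is a Commit, otherwise fall back to the noteable's system note timestamp. A condensed standalone sketch of that selection:

    USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE = false

    def note_created_at(mentioner_is_commit:, commit_date:, system_note_timestamp:)
      created_at = commit_date if USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE && mentioner_is_commit
      created_at || system_note_timestamp
    end

    note_created_at(mentioner_is_commit: true,
                    commit_date: Time.new(2021, 5, 1),
                    system_note_timestamp: Time.now)
    # => Time.now, because the constant is currently false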
diff --git a/app/services/system_notes/merge_requests_service.rb b/app/services/system_notes/merge_requests_service.rb
index 546a23c95c2..7758c1e8597 100644
--- a/app/services/system_notes/merge_requests_service.rb
+++ b/app/services/system_notes/merge_requests_service.rb
@@ -27,7 +27,7 @@ module SystemNotes
end
def handle_merge_request_draft
- action = noteable.work_in_progress? ? "draft" : "ready"
+ action = noteable.draft? ? "draft" : "ready"
body = "marked this merge request as **#{action}**"
diff --git a/app/services/terraform/remote_state_handler.rb b/app/services/terraform/remote_state_handler.rb
index f13477b8b34..849afaddec6 100644
--- a/app/services/terraform/remote_state_handler.rb
+++ b/app/services/terraform/remote_state_handler.rb
@@ -3,6 +3,7 @@
module Terraform
class RemoteStateHandler < BaseService
StateLockedError = Class.new(StandardError)
+ StateDeletedError = Class.new(StandardError)
UnauthorizedError = Class.new(StandardError)
def find_with_lock
@@ -66,14 +67,15 @@ module Terraform
find_params = { project: project, name: params[:name] }
- return find_state!(find_params) if find_only
+ state = if find_only
+ find_state!(find_params)
+ else
+ Terraform::State.create_or_find_by(find_params)
+ end
- state = Terraform::State.create_or_find_by(find_params)
+ raise StateDeletedError if state.deleted_at?
- # https://github.com/rails/rails/issues/36027
- return state unless state.errors.of_kind? :name, :taken
-
- find_state(find_params)
+ state
end
def lock_matches?(state)
diff --git a/app/services/terraform/states/destroy_service.rb b/app/services/terraform/states/destroy_service.rb
new file mode 100644
index 00000000000..728533a60ed
--- /dev/null
+++ b/app/services/terraform/states/destroy_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Terraform
+ module States
+ class DestroyService
+ def initialize(state)
+ @state = state
+ end
+
+ def execute
+ return unless state.deleted_at?
+
+ state.versions.each_batch(column: :version) do |batch|
+ process_batch(batch)
+ end
+
+ state.destroy!
+ end
+
+ private
+
+ attr_reader :state
+
+ # Overridden in EE
+ def process_batch(batch)
+ batch.each do |version|
+ version.file.remove!
+ end
+ end
+ end
+ end
+end
+
+Terraform::States::DestroyService.prepend_mod
diff --git a/app/services/terraform/states/trigger_destroy_service.rb b/app/services/terraform/states/trigger_destroy_service.rb
new file mode 100644
index 00000000000..3669bdcf716
--- /dev/null
+++ b/app/services/terraform/states/trigger_destroy_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Terraform
+ module States
+ class TriggerDestroyService
+ def initialize(state, current_user:)
+ @state = state
+ @current_user = current_user
+ end
+
+ def execute
+ return unauthorized_response unless can_destroy_state?
+ return state_locked_response if state.locked?
+
+ state.update!(deleted_at: Time.current)
+
+ Terraform::States::DestroyWorker.perform_async(state.id)
+
+ ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :state, :current_user
+
+ def can_destroy_state?
+ current_user.can?(:admin_terraform_state, state.project)
+ end
+
+ def unauthorized_response
+ error_response(s_('Terraform|You have insufficient permissions to delete this state'))
+ end
+
+ def state_locked_response
+ error_response(s_('Terraform|Cannot remove a locked state'))
+ end
+
+ def error_response(message)
+ ServiceResponse.error(message: message)
+ end
+ end
+ end
+end
diff --git a/app/services/two_factor/destroy_service.rb b/app/services/two_factor/destroy_service.rb
index b8bbe215d6e..859012c2153 100644
--- a/app/services/two_factor/destroy_service.rb
+++ b/app/services/two_factor/destroy_service.rb
@@ -8,7 +8,7 @@ module TwoFactor
result = disable_two_factor
- notification_service.disabled_two_factor(user) if result[:status] == :success
+ notify_on_success(user) if result[:status] == :success
result
end
@@ -20,5 +20,11 @@ module TwoFactor
user.disable_two_factor!
end
end
+
+ def notify_on_success(user)
+ notification_service.disabled_two_factor(user)
+ end
end
end
+
+TwoFactor::DestroyService.prepend_mod_with('TwoFactor::DestroyService')
diff --git a/app/services/user_project_access_changed_service.rb b/app/services/user_project_access_changed_service.rb
index 5bba986f4ad..ceaf21bb926 100644
--- a/app/services/user_project_access_changed_service.rb
+++ b/app/services/user_project_access_changed_service.rb
@@ -2,8 +2,10 @@
class UserProjectAccessChangedService
DELAY = 1.hour
+ MEDIUM_DELAY = 10.minutes
HIGH_PRIORITY = :high
+ MEDIUM_PRIORITY = :medium
LOW_PRIORITY = :low
def initialize(user_ids)
@@ -11,6 +13,8 @@ class UserProjectAccessChangedService
end
def execute(blocking: true, priority: HIGH_PRIORITY)
+ return if @user_ids.empty?
+
bulk_args = @user_ids.map { |id| [id] }
result =
@@ -19,6 +23,8 @@ class UserProjectAccessChangedService
else
if priority == HIGH_PRIORITY
AuthorizedProjectsWorker.bulk_perform_async(bulk_args) # rubocop:disable Scalability/BulkPerformWithContext
+ elsif priority == MEDIUM_PRIORITY
+ AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker.bulk_perform_in(MEDIUM_DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds) # rubocop:disable Scalability/BulkPerformWithContext
else
with_related_class_context do
# We wrap the execution in `with_related_class_context`so as to obtain
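The new MEDIUM_PRIORITY branch spreads the refresh out with bulk_perform_in: execution starts after the 10-minute MEDIUM_DELAY and jobs are enqueued in batches of 100, 30 seconds apart. The per-user job arguments are built the same way for every priority; a minimal sketch of that argument shape:

    user_ids  = [1, 2, 3]
    bulk_args = user_ids.map { |id| [id] }
    # => [[1], [2], [3]]

    # With MEDIUM_PRIORITY the service then calls (inside the application):
    # AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker
    #   .bulk_perform_in(10.minutes, bulk_args, batch_size: 100, batch_delay: 30.seconds)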
diff --git a/app/services/web_hook_service.rb b/app/services/web_hook_service.rb
index c0727e52cc3..f2f94563e56 100644
--- a/app/services/web_hook_service.rb
+++ b/app/services/web_hook_service.rb
@@ -26,6 +26,12 @@ class WebHookService
end
REQUEST_BODY_SIZE_LIMIT = 25.megabytes
+ # The response body is for UI display only. It does not make much sense to save
+ # whatever the receivers throw back at us.
+ RESPONSE_BODY_SIZE_LIMIT = 8.kilobytes
+ # The headers are for debugging purposes. They are displayed in the UI only.
+ RESPONSE_HEADERS_COUNT_LIMIT = 50
+ RESPONSE_HEADERS_SIZE_LIMIT = 1.kilobytes
attr_accessor :hook, :data, :hook_name, :request_options
attr_reader :uniqueness_token
@@ -98,7 +104,7 @@ class WebHookService
def async_execute
Gitlab::ApplicationContext.with_context(hook.application_context) do
- break log_rate_limited if rate_limited?
+ break log_rate_limited if rate_limit!
break log_recursion_blocked if recursion_blocked?
params = {
@@ -141,7 +147,7 @@ class WebHookService
execution_duration: execution_duration,
request_headers: build_headers,
request_data: data,
- response_headers: format_response_headers(response),
+ response_headers: safe_response_headers(response),
response_body: safe_response_body(response),
response_status: response.code,
internal_error_message: error_message
@@ -150,8 +156,21 @@ class WebHookService
if @force # executed as part of test - run log-execution inline.
::WebHooks::LogExecutionService.new(hook: hook, log_data: log_data, response_category: category).execute
else
- ::WebHooks::LogExecutionWorker
- .perform_async(hook.id, log_data, category, uniqueness_token)
+ queue_log_execution_with_retry(log_data, category)
+ end
+ end
+
+ def queue_log_execution_with_retry(log_data, category)
+ retried = false
+ begin
+ ::WebHooks::LogExecutionWorker.perform_async(hook.id, log_data, category, uniqueness_token)
+ rescue Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError
+ raise if retried
+
+ # Strip request data
+ log_data[:request_data] = ::WebHookLog::OVERSIZE_REQUEST_DATA
+ retried = true
+ retry
end
end
@@ -181,52 +200,56 @@ class WebHookService
# Make response headers more stylish
# Net::HTTPHeader has downcased hash with arrays: { 'content-type' => ['text/html; charset=utf-8'] }
# This method format response to capitalized hash with strings: { 'Content-Type' => 'text/html; charset=utf-8' }
- def format_response_headers(response)
- response.headers.each_capitalized.to_h
+ # rubocop:disable Style/HashTransformValues
+ def safe_response_headers(response)
+ response.headers.each_capitalized.first(RESPONSE_HEADERS_COUNT_LIMIT).to_h do |header_key, header_value|
+ [enforce_utf8(header_key), string_size_limit(enforce_utf8(header_value), RESPONSE_HEADERS_SIZE_LIMIT)]
+ end
end
+ # rubocop:enable Style/HashTransformValues
def safe_response_body(response)
return '' unless response.body
- response.body.encode('UTF-8', invalid: :replace, undef: :replace, replace: '')
+ response_body = enforce_utf8(response.body)
+ string_size_limit(response_body, RESPONSE_BODY_SIZE_LIMIT)
end
- def rate_limited?
- return false if rate_limit.nil?
-
- Gitlab::ApplicationRateLimiter.throttled?(
- :web_hook_calls,
- scope: [hook],
- threshold: rate_limit
- )
+ # Increments rate-limit counter.
+ # Returns true if hook should be rate-limited.
+ def rate_limit!
+ Gitlab::WebHooks::RateLimiter.new(hook).rate_limit!
end
def recursion_blocked?
Gitlab::WebHooks::RecursionDetection.block?(hook)
end
- def rate_limit
- @rate_limit ||= hook.rate_limit
+ def log_rate_limited
+ log_auth_error('Webhook rate limit exceeded')
end
- def log_rate_limited
- Gitlab::AuthLogger.error(
- message: 'Webhook rate limit exceeded',
- hook_id: hook.id,
- hook_type: hook.type,
- hook_name: hook_name,
- **Gitlab::ApplicationContext.current
+ def log_recursion_blocked
+ log_auth_error(
+ 'Recursive webhook blocked from executing',
+ recursion_detection: ::Gitlab::WebHooks::RecursionDetection.to_log(hook)
)
end
- def log_recursion_blocked
+ def log_auth_error(message, params = {})
Gitlab::AuthLogger.error(
- message: 'Recursive webhook blocked from executing',
- hook_id: hook.id,
- hook_type: hook.type,
- hook_name: hook_name,
- recursion_detection: ::Gitlab::WebHooks::RecursionDetection.to_log(hook),
- **Gitlab::ApplicationContext.current
+ params.merge(
+ { message: message, hook_id: hook.id, hook_type: hook.type, hook_name: hook_name },
+ Gitlab::ApplicationContext.current
+ )
)
end
+
+ def string_size_limit(str, limit)
+ str.truncate_bytes(limit)
+ end
+
+ def enforce_utf8(str)
+ Gitlab::EncodingHelper.encode_utf8(str)
+ end
end
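safe_response_headers and safe_response_body cap what is stored from the receiver: at most 50 headers, each value forced to UTF-8 and truncated to 1 KB, and a body truncated to 8 KB. String#truncate_bytes comes from ActiveSupport, so a standalone sketch needs that require:

    require 'active_support/core_ext/string/filters'

    RESPONSE_BODY_SIZE_LIMIT = 8 * 1024

    body      = 'x' * 20_000
    safe_body = body.truncate_bytes(RESPONSE_BODY_SIZE_LIMIT)

    safe_body.bytesize # => at most 8192; the omission marker counts toward the limit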
diff --git a/app/services/web_hooks/destroy_service.rb b/app/services/web_hooks/destroy_service.rb
index 58117985b56..ecb530f0d2a 100644
--- a/app/services/web_hooks/destroy_service.rb
+++ b/app/services/web_hooks/destroy_service.rb
@@ -2,77 +2,27 @@
module WebHooks
class DestroyService
- include BaseServiceUtility
-
- BATCH_SIZE = 1000
- LOG_COUNT_THRESHOLD = 10000
-
- DestroyError = Class.new(StandardError)
-
- attr_accessor :current_user, :web_hook
+ attr_accessor :current_user
def initialize(current_user)
@current_user = current_user
end
+ # Destroy the hook immediately and schedule its logs for deletion
def execute(web_hook)
- @web_hook = web_hook
-
- async = false
- # For a better user experience, it's better if the Web hook is
- # destroyed right away without waiting for Sidekiq. However, if
- # there are a lot of Web hook logs, we will need more time to
- # clean them up, so schedule a Sidekiq job to do this.
- if needs_async_destroy?
- Gitlab::AppLogger.info("User #{current_user&.id} scheduled a deletion of hook ID #{web_hook.id}")
- async_destroy(web_hook)
- async = true
- else
- sync_destroy(web_hook)
- end
-
- success({ async: async })
- end
-
- def sync_destroy(web_hook)
- @web_hook = web_hook
+ hook_id = web_hook.id
- delete_web_hook_logs
- result = web_hook.destroy
+ if web_hook.destroy
+ WebHooks::LogDestroyWorker.perform_async({ 'hook_id' => hook_id })
+ Gitlab::AppLogger.info("User #{current_user&.id} scheduled a deletion of logs for hook ID #{hook_id}")
- if result
- success({ async: false })
+ ServiceResponse.success(payload: { async: false })
else
- error("Unable to destroy #{web_hook.model_name.human}")
+ ServiceResponse.error(message: "Unable to destroy #{web_hook.model_name.human}")
end
end
- private
-
- def async_destroy(web_hook)
- WebHooks::DestroyWorker.perform_async(current_user.id, web_hook.id)
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def needs_async_destroy?
- web_hook.web_hook_logs.limit(LOG_COUNT_THRESHOLD).count == LOG_COUNT_THRESHOLD
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def delete_web_hook_logs
- loop do
- count = delete_web_hook_logs_in_batches
- break if count < BATCH_SIZE
- end
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def delete_web_hook_logs_in_batches
- # We can't use EachBatch because that does an ORDER BY id, which can
- # easily time out. We don't actually care about ordering when
- # we are deleting these rows.
- web_hook.web_hook_logs.limit(BATCH_SIZE).delete_all
- end
- # rubocop: enable CodeReuse/ActiveRecord
+ # Backwards compatibility with WebHooks::DestroyWorker
+ alias_method :sync_destroy, :execute
end
end
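The rewritten DestroyService deletes the hook row inline and defers only the slow log cleanup to a background worker, reporting the outcome as a ServiceResponse. A generic sketch of that shape, where Result, the 'LogCleanupJob' name and the job_queue argument are illustrative stand-ins:

Result = Struct.new(:success, :message, keyword_init: true)   # stand-in for ServiceResponse

def destroy_hook(hook, job_queue)
  hook_id = hook.id

  if hook.destroy
    # the row is gone immediately; only the slow log cleanup is deferred
    job_queue.enqueue('LogCleanupJob', 'hook_id' => hook_id)
    Result.new(success: true)
  else
    Result.new(success: false, message: 'Unable to destroy hook')
  end
end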
diff --git a/app/services/web_hooks/log_destroy_service.rb b/app/services/web_hooks/log_destroy_service.rb
new file mode 100644
index 00000000000..8a5d214a95e
--- /dev/null
+++ b/app/services/web_hooks/log_destroy_service.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module WebHooks
+ class LogDestroyService
+ BATCH_SIZE = 1000
+
+ def initialize(web_hook_id)
+ @web_hook_id = web_hook_id
+ end
+
+ def execute
+ next while WebHookLog.delete_batch_for(@web_hook_id, batch_size: BATCH_SIZE)
+
+ ServiceResponse.success
+ rescue StandardError => ex
+ ServiceResponse.error(message: ex.message)
+ end
+ end
+end
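LogDestroyService deletes logs in fixed-size batches until nothing is left, so no single DELETE statement has to cover the whole table. A minimal sketch of the same loop, assuming a log_store whose delete_batch_for returns true while rows were removed and false once the table is empty:

BATCH_SIZE = 1000

def purge_logs(log_store, hook_id)
  # keep deleting fixed-size batches until a pass removes nothing
  loop do
    break unless log_store.delete_batch_for(hook_id, batch_size: BATCH_SIZE)
  end
end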
diff --git a/app/services/work_items/update_service.rb b/app/services/work_items/update_service.rb
index 5c45f4d90e5..0b420881b4b 100644
--- a/app/services/work_items/update_service.rb
+++ b/app/services/work_items/update_service.rb
@@ -2,12 +2,30 @@
module WorkItems
class UpdateService < ::Issues::UpdateService
+ def initialize(project:, current_user: nil, params: {}, spam_params: nil, widget_params: {})
+ super(project: project, current_user: current_user, params: params, spam_params: nil)
+
+ @widget_params = widget_params
+ end
+
private
- def after_update(issuable)
+ def update(work_item)
+ execute_widgets(work_item: work_item, callback: :update)
+
super
+ end
+
+ def after_update(work_item)
+ super
+
+ GraphqlTriggers.issuable_title_updated(work_item) if work_item.previous_changes.key?(:title)
+ end
- GraphqlTriggers.issuable_title_updated(issuable) if issuable.previous_changes.key?(:title)
+ def execute_widgets(work_item:, callback:)
+ work_item.widgets.each do |widget|
+ widget.try(callback, params: @widget_params[widget.class.api_symbol])
+ end
end
end
end
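The updated WorkItems::UpdateService hands each widget only its own slice of widget_params and silently skips widgets that do not implement the callback (the try call above). A self-contained sketch of that dispatch, with made-up widget classes and respond_to?/public_send standing in for ActiveSupport's try:

class DescriptionWidget
  def self.api_symbol
    :description_widget
  end

  def update(params:)
    puts "updating description to #{params[:description].inspect}"
  end
end

class WeightWidget
  def self.api_symbol
    :weight_widget
  end
  # no #update here, so the :update callback is simply skipped
end

def execute_widgets(widgets, callback, widget_params)
  widgets.each do |widget|
    next unless widget.respond_to?(callback)

    widget.public_send(callback, params: widget_params[widget.class.api_symbol])
  end
end

execute_widgets(
  [DescriptionWidget.new, WeightWidget.new],
  :update,
  { description_widget: { description: 'new text' } }
)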