
gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2022-09-20 02:18:09 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-09-20 02:18:09 +0300
commit     6ed4ec3e0b1340f96b7c043ef51d1b33bbe85fde (patch)
tree       dc4d20fe6064752c0bd323187252c77e0a89144b /app/services
parent     9868dae7fc0655bd7ce4a6887d4e6d487690eeed (diff)

Add latest changes from gitlab-org/gitlab@15-4-stable-ee (tag: v15.4.0-rc42)
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/alert_management/process_prometheus_alert_service.rb  5
-rw-r--r--  app/services/auth/container_registry_authentication_service.rb  1
-rw-r--r--  app/services/authorized_project_update/find_records_due_for_refresh_service.rb  32
-rw-r--r--  app/services/boards/base_item_move_service.rb  22
-rw-r--r--  app/services/boards/issues/move_service.rb  4
-rw-r--r--  app/services/bulk_imports/file_download_service.rb  83
-rw-r--r--  app/services/bulk_imports/relation_export_service.rb  2
-rw-r--r--  app/services/bulk_imports/tree_export_service.rb  8
-rw-r--r--  app/services/ci/after_requeue_job_service.rb  36
-rw-r--r--  app/services/ci/archive_trace_service.rb  2
-rw-r--r--  app/services/ci/build_erase_service.rb  49
-rw-r--r--  app/services/ci/build_report_result_service.rb  3
-rw-r--r--  app/services/ci/compare_reports_base_service.rb  9
-rw-r--r--  app/services/ci/create_downstream_pipeline_service.rb  15
-rw-r--r--  app/services/ci/create_pipeline_service.rb  1
-rw-r--r--  app/services/ci/delete_objects_service.rb  4
-rw-r--r--  app/services/ci/expire_pipeline_cache_service.rb  2
-rw-r--r--  app/services/ci/generate_coverage_reports_service.rb  2
-rw-r--r--  app/services/ci/job_artifacts/create_service.rb  2
-rw-r--r--  app/services/ci/job_artifacts/delete_service.rb  32
-rw-r--r--  app/services/ci/job_artifacts/track_artifact_report_service.rb  23
-rw-r--r--  app/services/ci/pipeline_artifacts/coverage_report_service.rb  8
-rw-r--r--  app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb  20
-rw-r--r--  app/services/ci/pipelines/add_job_service.rb  2
-rw-r--r--  app/services/ci/queue/pending_builds_strategy.rb  6
-rw-r--r--  app/services/ci/register_job_service.rb  2
-rw-r--r--  app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb  6
-rw-r--r--  app/services/ci/runners/set_runner_associated_projects_service.rb  69
-rw-r--r--  app/services/ci/runners/update_runner_service.rb  9
-rw-r--r--  app/services/ci/stuck_builds/drop_helpers.rb  2
-rw-r--r--  app/services/ci/test_failure_history_service.rb  4
-rw-r--r--  app/services/ci/unlock_artifacts_service.rb  17
-rw-r--r--  app/services/commits/create_service.rb  2
-rw-r--r--  app/services/concerns/alert_management/alert_processing.rb  4
-rw-r--r--  app/services/concerns/ci/downstream_pipeline_helpers.rb  3
-rw-r--r--  app/services/concerns/ci/job_token_scope/edit_scope_validations.rb  4
-rw-r--r--  app/services/concerns/projects/container_repository/gitlab/timeoutable.rb  27
-rw-r--r--  app/services/container_expiration_policies/cleanup_service.rb  2
-rw-r--r--  app/services/deployments/update_environment_service.rb  14
-rw-r--r--  app/services/design_management/copy_design_collection/copy_service.rb  4
-rw-r--r--  app/services/design_management/delete_designs_service.rb  3
-rw-r--r--  app/services/design_management/runs_design_actions.rb  2
-rw-r--r--  app/services/design_management/save_designs_service.rb  6
-rw-r--r--  app/services/environments/stop_service.rb  15
-rw-r--r--  app/services/files/multi_service.rb  2
-rw-r--r--  app/services/google_cloud/create_cloudsql_instance_service.rb  2
-rw-r--r--  app/services/google_cloud/enable_cloudsql_service.rb  2
-rw-r--r--  app/services/google_cloud/fetch_google_ip_list_service.rb  89
-rw-r--r--  app/services/groups/create_service.rb  2
-rw-r--r--  app/services/import/github_service.rb  8
-rw-r--r--  app/services/issuable_base_service.rb  6
-rw-r--r--  app/services/issuable_links/create_service.rb  8
-rw-r--r--  app/services/issues/base_service.rb  3
-rw-r--r--  app/services/issues/close_service.rb  9
-rw-r--r--  app/services/issues/export_csv_service.rb  10
-rw-r--r--  app/services/issues/relative_position_rebalancing_service.rb  2
-rw-r--r--  app/services/issues/reopen_service.rb  7
-rw-r--r--  app/services/labels/transfer_service.rb  6
-rw-r--r--  app/services/members/update_service.rb  9
-rw-r--r--  app/services/merge_requests/after_create_service.rb  2
-rw-r--r--  app/services/merge_requests/approval_service.rb  41
-rw-r--r--  app/services/merge_requests/base_service.rb  43
-rw-r--r--  app/services/merge_requests/ff_merge_service.rb  30
-rw-r--r--  app/services/merge_requests/handle_assignees_change_service.rb  2
-rw-r--r--  app/services/merge_requests/merge_service.rb  23
-rw-r--r--  app/services/merge_requests/mergeability/detailed_merge_status_service.rb  63
-rw-r--r--  app/services/merge_requests/mergeability/logger.rb  103
-rw-r--r--  app/services/merge_requests/mergeability/run_checks_service.rb  13
-rw-r--r--  app/services/merge_requests/refresh_service.rb  1
-rw-r--r--  app/services/merge_requests/update_assignees_service.rb  2
-rw-r--r--  app/services/merge_requests/update_service.rb  14
-rw-r--r--  app/services/milestones/transfer_service.rb  5
-rw-r--r--  app/services/namespaces/in_product_marketing_emails_service.rb  2
-rw-r--r--  app/services/notification_recipients/builder/base.rb  52
-rw-r--r--  app/services/onboarding/progress_service.rb  33
-rw-r--r--  app/services/onboarding_progress_service.rb  31
-rw-r--r--  app/services/packages/conan/search_service.rb  2
-rw-r--r--  app/services/packages/debian/generate_distribution_service.rb  1
-rw-r--r--  app/services/packages/debian/process_changes_service.rb  16
-rw-r--r--  app/services/packages/rpm/repository_metadata/base_builder.rb  20
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_filelist_xml.rb  14
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_other_xml.rb  14
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_primary_xml.rb  15
-rw-r--r--  app/services/packages/rpm/repository_metadata/build_repomd_xml.rb  59
-rw-r--r--  app/services/packages/rubygems/dependency_resolver_service.rb  5
-rw-r--r--  app/services/post_receive_service.rb  2
-rw-r--r--  app/services/projects/alerting/notify_service.rb  17
-rw-r--r--  app/services/projects/blame_service.rb  17
-rw-r--r--  app/services/projects/container_repository/base_container_repository_service.rb  17
-rw-r--r--  app/services/projects/container_repository/cleanup_tags_base_service.rb  119
-rw-r--r--  app/services/projects/container_repository/cleanup_tags_service.rb  161
-rw-r--r--  app/services/projects/container_repository/gitlab/cleanup_tags_service.rb  81
-rw-r--r--  app/services/projects/container_repository/gitlab/delete_tags_service.rb  15
-rw-r--r--  app/services/projects/create_service.rb  17
-rw-r--r--  app/services/projects/destroy_service.rb  26
-rw-r--r--  app/services/projects/prometheus/alerts/notify_service.rb  20
-rw-r--r--  app/services/projects/update_pages_service.rb  11
-rw-r--r--  app/services/releases/create_service.rb  3
-rw-r--r--  app/services/resource_events/change_labels_service.rb  5
-rw-r--r--  app/services/service_ping/submit_service.rb  78
-rw-r--r--  app/services/service_response.rb  26
-rw-r--r--  app/services/snippets/base_service.rb  9
-rw-r--r--  app/services/snippets/bulk_destroy_service.rb  4
-rw-r--r--  app/services/snippets/create_service.rb  3
-rw-r--r--  app/services/snippets/update_service.rb  5
-rw-r--r--  app/services/spam/spam_action_service.rb  9
-rw-r--r--  app/services/spam/spam_constants.rb  1
-rw-r--r--  app/services/spam/spam_verdict_service.rb  10
-rw-r--r--  app/services/system_notes/issuables_service.rb  56
-rw-r--r--  app/services/system_notes/time_tracking_service.rb  14
-rw-r--r--  app/services/topics/merge_service.rb  13
-rw-r--r--  app/services/users/authorized_build_service.rb  2
-rw-r--r--  app/services/users/destroy_service.rb  51
-rw-r--r--  app/services/users/email_verification/base_service.rb  27
-rw-r--r--  app/services/users/email_verification/generate_token_service.rb  21
-rw-r--r--  app/services/users/email_verification/validate_token_service.rb  78
-rw-r--r--  app/services/users/migrate_records_to_ghost_user_in_batches_service.rb  26
-rw-r--r--  app/services/users/migrate_records_to_ghost_user_service.rb  111
118 files changed, 1719 insertions, 675 deletions
diff --git a/app/services/alert_management/process_prometheus_alert_service.rb b/app/services/alert_management/process_prometheus_alert_service.rb
index 1b377a3d367..e0594247975 100644
--- a/app/services/alert_management/process_prometheus_alert_service.rb
+++ b/app/services/alert_management/process_prometheus_alert_service.rb
@@ -36,10 +36,5 @@ module AlertManagement
)
end
end
-
- override :resolving_alert?
- def resolving_alert?
- incoming_payload.resolved?
- end
end
end
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index e806bef46fe..509c2d4d544 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -83,6 +83,7 @@ module Auth
token.audience = params[:service]
token.subject = current_user.try(:username)
token.expire_time = self.class.token_expire_at
+ token[:auth_type] = params[:auth_type]
token[:access] = accesses.compact
end
end
diff --git a/app/services/authorized_project_update/find_records_due_for_refresh_service.rb b/app/services/authorized_project_update/find_records_due_for_refresh_service.rb
index 3a2251f15cc..dd696da0447 100644
--- a/app/services/authorized_project_update/find_records_due_for_refresh_service.rb
+++ b/app/services/authorized_project_update/find_records_due_for_refresh_service.rb
@@ -28,31 +28,33 @@ module AuthorizedProjectUpdate
current.except!(*projects_with_duplicates)
remove |= current.each_with_object([]) do |(project_id, row), array|
+ next if fresh[project_id] && fresh[project_id] == row.access_level
+
# rows not in the new list or with a different access level should be
# removed.
- if !fresh[project_id] || fresh[project_id] != row.access_level
- if incorrect_auth_found_callback
- incorrect_auth_found_callback.call(project_id, row.access_level)
- end
- array << row.project_id
+ if incorrect_auth_found_callback
+ incorrect_auth_found_callback.call(project_id, row.access_level)
end
+
+ array << row.project_id
end
add = fresh.each_with_object([]) do |(project_id, level), array|
+ next if current[project_id] && current[project_id].access_level == level
+
# rows not in the old list or with a different access level should be
# added.
- if !current[project_id] || current[project_id].access_level != level
- if missing_auth_found_callback
- missing_auth_found_callback.call(project_id, level)
- end
-
- array << {
- user_id: user.id,
- project_id: project_id,
- access_level: level
- }
+
+ if missing_auth_found_callback
+ missing_auth_found_callback.call(project_id, level)
end
+
+ array << {
+ user_id: user.id,
+ project_id: project_id,
+ access_level: level
+ }
end
[remove, add]
diff --git a/app/services/boards/base_item_move_service.rb b/app/services/boards/base_item_move_service.rb
index 9d711d83fd2..c9da889c536 100644
--- a/app/services/boards/base_item_move_service.rb
+++ b/app/services/boards/base_item_move_service.rb
@@ -2,6 +2,8 @@
module Boards
class BaseItemMoveService < Boards::BaseService
+ LIST_END_POSITION = -1
+
def execute(issuable)
issuable_modification_params = issuable_params(issuable)
return if issuable_modification_params.empty?
@@ -32,7 +34,13 @@ module Boards
)
end
- reposition_ids = move_between_ids(params)
+ move_params = if params[:position_in_list].present?
+ move_params_from_list_position(params[:position_in_list])
+ else
+ params
+ end
+
+ reposition_ids = move_between_ids(move_params)
attrs.merge!(reposition_params(reposition_ids)) if reposition_ids
attrs
@@ -90,6 +98,18 @@ module Boards
::Label.ids_on_board(board.id)
end
+ def move_params_from_list_position(position)
+ if position == LIST_END_POSITION
+ { move_before_id: moving_to_list_items_relation.reverse_order.pick(:id), move_after_id: nil }
+ else
+ item_at_position = moving_to_list_items_relation.offset(position).pick(:id) # rubocop: disable CodeReuse/ActiveRecord
+
+ return move_params_from_list_position(LIST_END_POSITION) if item_at_position.nil?
+
+ { move_before_id: nil, move_after_id: item_at_position }
+ end
+ end
+
def move_between_ids(move_params)
ids = [move_params[:move_before_id], move_params[:move_after_id]]
.map(&:to_i)
diff --git a/app/services/boards/issues/move_service.rb b/app/services/boards/issues/move_service.rb
index 90226b9d4e0..4de4d7c8f69 100644
--- a/app/services/boards/issues/move_service.rb
+++ b/app/services/boards/issues/move_service.rb
@@ -54,6 +54,10 @@ module Boards
def update(issue, issue_modification_params)
::Issues::UpdateService.new(project: issue.project, current_user: current_user, params: issue_modification_params).execute(issue)
end
+
+ def moving_to_list_items_relation
+ Boards::Issues::ListService.new(board.resource_parent, current_user, board_id: board.id, id: moving_to_list.id).execute
+ end
end
end
end
diff --git a/app/services/bulk_imports/file_download_service.rb b/app/services/bulk_imports/file_download_service.rb
index a2c8ba5b1cd..45f1350df92 100644
--- a/app/services/bulk_imports/file_download_service.rb
+++ b/app/services/bulk_imports/file_download_service.rb
@@ -10,10 +10,11 @@
# @param filename [String] Name of the file to download, if known. Use remote filename if none given.
module BulkImports
class FileDownloadService
+ include ::BulkImports::FileDownloads::FilenameFetch
+ include ::BulkImports::FileDownloads::Validations
+
ServiceError = Class.new(StandardError)
- REMOTE_FILENAME_PATTERN = %r{filename="(?<filename>[^"]+)"}.freeze
- FILENAME_SIZE_LIMIT = 255 # chars before the extension
DEFAULT_FILE_SIZE_LIMIT = 5.gigabytes
DEFAULT_ALLOWED_CONTENT_TYPES = %w(application/gzip application/octet-stream).freeze
@@ -74,6 +75,10 @@ module BulkImports
raise e
end
+ def raise_error(message)
+ raise ServiceError, message
+ end
+
def http_client
@http_client ||= BulkImports::Clients::HTTP.new(
url: configuration.url,
@@ -85,24 +90,20 @@ module BulkImports
::Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
- def headers
- @headers ||= http_client.head(relative_url).headers
- end
-
- def validate_filepath
- Gitlab::Utils.check_path_traversal!(filepath)
+ def response_headers
+ @response_headers ||= http_client.head(relative_url).headers
end
def validate_tmpdir
Gitlab::Utils.check_allowed_absolute_path!(tmpdir, [Dir.tmpdir])
end
- def validate_symlink
- if File.lstat(filepath).symlink?
- File.delete(filepath)
+ def filepath
+ @filepath ||= File.join(@tmpdir, filename)
+ end
- raise(ServiceError, 'Invalid downloaded file')
- end
+ def filename
+ @filename.presence || remote_filename
end
def validate_url
@@ -113,61 +114,5 @@ module BulkImports
schemes: %w(http https)
)
end
-
- def validate_content_length
- validate_size!(headers['content-length'])
- end
-
- def validate_size!(size)
- if size.blank?
- raise ServiceError, 'Missing content-length header'
- elsif size.to_i > file_size_limit
- raise ServiceError, "File size %{size} exceeds limit of %{limit}" % {
- size: ActiveSupport::NumberHelper.number_to_human_size(size),
- limit: ActiveSupport::NumberHelper.number_to_human_size(file_size_limit)
- }
- end
- end
-
- def validate_content_type
- content_type = headers['content-type']
-
- raise(ServiceError, 'Invalid content type') if content_type.blank? || allowed_content_types.exclude?(content_type)
- end
-
- def filepath
- @filepath ||= File.join(@tmpdir, filename)
- end
-
- def filename
- @filename.presence || remote_filename
- end
-
- # Fetch the remote filename information from the request content-disposition header
- # - Raises if the filename does not exist
- # - If the filename is longer then 255 chars truncate it
- # to be a total of 255 chars (with the extension)
- def remote_filename
- @remote_filename ||=
- headers['content-disposition'].to_s
- .match(REMOTE_FILENAME_PATTERN) # matches the filename pattern
- .then { |match| match&.named_captures || {} } # ensures the match is a hash
- .fetch('filename') # fetches the 'filename' key or raise KeyError
- .then(&File.method(:basename)) # Ensures to remove path from the filename (../ for instance)
- .then(&method(:ensure_filename_size)) # Ensures the filename is within the FILENAME_SIZE_LIMIT
- rescue KeyError
- raise ServiceError, 'Remote filename not provided in content-disposition header'
- end
-
- def ensure_filename_size(filename)
- if filename.length <= FILENAME_SIZE_LIMIT
- filename
- else
- extname = File.extname(filename)
- basename = File.basename(filename, extname)[0, FILENAME_SIZE_LIMIT]
-
- "#{basename}#{extname}"
- end
- end
end
end
diff --git a/app/services/bulk_imports/relation_export_service.rb b/app/services/bulk_imports/relation_export_service.rb
index c43f0d8cb4f..b1efa881180 100644
--- a/app/services/bulk_imports/relation_export_service.rb
+++ b/app/services/bulk_imports/relation_export_service.rb
@@ -65,7 +65,7 @@ module BulkImports
def export_service
@export_service ||= if config.tree_relation?(relation) || config.self_relation?(relation)
- TreeExportService.new(portable, config.export_path, relation)
+ TreeExportService.new(portable, config.export_path, relation, user)
elsif config.file_relation?(relation)
FileExportService.new(portable, config.export_path, relation)
else
diff --git a/app/services/bulk_imports/tree_export_service.rb b/app/services/bulk_imports/tree_export_service.rb
index 8e885e590d1..b6f094da558 100644
--- a/app/services/bulk_imports/tree_export_service.rb
+++ b/app/services/bulk_imports/tree_export_service.rb
@@ -2,11 +2,12 @@
module BulkImports
class TreeExportService
- def initialize(portable, export_path, relation)
+ def initialize(portable, export_path, relation, user)
@portable = portable
@export_path = export_path
@relation = relation
@config = FileTransfer.config_for(portable)
+ @user = user
end
def execute
@@ -27,7 +28,7 @@ module BulkImports
private
- attr_reader :export_path, :portable, :relation, :config
+ attr_reader :export_path, :portable, :relation, :config, :user
# rubocop: disable CodeReuse/Serializer
def serializer
@@ -35,7 +36,8 @@ module BulkImports
portable,
config.portable_tree,
json_writer,
- exportable_path: ''
+ exportable_path: '',
+ current_user: user
)
end
# rubocop: enable CodeReuse/Serializer
diff --git a/app/services/ci/after_requeue_job_service.rb b/app/services/ci/after_requeue_job_service.rb
index 1ae4639751b..634c547a623 100644
--- a/app/services/ci/after_requeue_job_service.rb
+++ b/app/services/ci/after_requeue_job_service.rb
@@ -21,9 +21,16 @@ module Ci
@processable.pipeline.reset_source_bridge!(current_user)
end
+ # rubocop: disable CodeReuse/ActiveRecord
def dependent_jobs
+ return legacy_dependent_jobs unless ::Feature.enabled?(:ci_requeue_with_dag_object_hierarchy, project)
+
ordered_by_dag(
- stage_dependent_jobs.or(needs_dependent_jobs).ordered_by_stage
+ ::Ci::Processable
+ .from_union(needs_dependent_jobs, stage_dependent_jobs)
+ .skipped
+ .ordered_by_stage
+ .preload(:needs)
)
end
@@ -34,22 +41,37 @@ module Ci
end
def stage_dependent_jobs
- skipped_jobs.after_stage(@processable.stage_idx)
+ @processable.pipeline.processables.after_stage(@processable.stage_idx)
end
def needs_dependent_jobs
- skipped_jobs.scheduling_type_dag.with_needs([@processable.name])
+ ::Gitlab::Ci::ProcessableObjectHierarchy.new(
+ ::Ci::Processable.where(id: @processable.id)
+ ).descendants
end
- def skipped_jobs
- @skipped_jobs ||= @processable.pipeline.processables.skipped
+ def legacy_skipped_jobs
+ @legacy_skipped_jobs ||= @processable.pipeline.processables.skipped
+ end
+
+ def legacy_dependent_jobs
+ ordered_by_dag(
+ legacy_stage_dependent_jobs.or(legacy_needs_dependent_jobs).ordered_by_stage.preload(:needs)
+ )
+ end
+
+ def legacy_stage_dependent_jobs
+ legacy_skipped_jobs.after_stage(@processable.stage_idx)
+ end
+
+ def legacy_needs_dependent_jobs
+ legacy_skipped_jobs.scheduling_type_dag.with_needs([@processable.name])
end
- # rubocop: disable CodeReuse/ActiveRecord
def ordered_by_dag(jobs)
sorted_job_names = sort_jobs(jobs).each_with_index.to_h
- jobs.preload(:needs).group_by(&:stage_idx).flat_map do |_, stage_jobs|
+ jobs.group_by(&:stage_idx).flat_map do |_, stage_jobs|
stage_jobs.sort_by { |job| sorted_job_names.fetch(job.name) }
end
end
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index 9705a236d98..566346a4b09 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -27,7 +27,7 @@ module Ci
job.trace.archive!
job.remove_pending_state!
- if Feature.enabled?(:datadog_integration_logs_collection, job.project) && job.job_artifacts_trace.present?
+ if job.job_artifacts_trace.present?
job.project.execute_integrations(Gitlab::DataBuilder::ArchiveTrace.build(job), :archive_trace_hooks)
end
diff --git a/app/services/ci/build_erase_service.rb b/app/services/ci/build_erase_service.rb
new file mode 100644
index 00000000000..8a468e094eb
--- /dev/null
+++ b/app/services/ci/build_erase_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Ci
+ class BuildEraseService
+ include BaseServiceUtility
+
+ def initialize(build, current_user)
+ @build = build
+ @current_user = current_user
+ end
+
+ def execute
+ unless build.erasable?
+ return ServiceResponse.error(message: _('Build cannot be erased'), http_status: :unprocessable_entity)
+ end
+
+ if build.project.refreshing_build_artifacts_size?
+ Gitlab::ProjectStatsRefreshConflictsLogger.warn_artifact_deletion_during_stats_refresh(
+ method: 'Ci::BuildEraseService#execute',
+ project_id: build.project_id
+ )
+ end
+
+ destroy_artifacts
+ erase_trace!
+ update_erased!
+
+ ServiceResponse.success(payload: build)
+ end
+
+ private
+
+ attr_reader :build, :current_user
+
+ def destroy_artifacts
+ # fix_expire_at is false because in this case we want to explicitly delete the job artifacts
+ # this flag is a workaround that will be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/355833
+ Ci::JobArtifacts::DestroyBatchService.new(build.job_artifacts, fix_expire_at: false).execute
+ end
+
+ def erase_trace!
+ build.trace.erase!
+ end
+
+ def update_erased!
+ build.update(erased_by: current_user, erased_at: Time.current, artifacts_expire_at: nil)
+ end
+ end
+end
diff --git a/app/services/ci/build_report_result_service.rb b/app/services/ci/build_report_result_service.rb
index f9146b3677a..20a31322919 100644
--- a/app/services/ci/build_report_result_service.rb
+++ b/app/services/ci/build_report_result_service.rb
@@ -22,7 +22,8 @@ module Ci
private
def generate_test_suite_report(build)
- build.collect_test_reports!(Gitlab::Ci::Reports::TestReport.new)
+ test_report = build.collect_test_reports!(Gitlab::Ci::Reports::TestReport.new)
+ test_report.get_suite(build.test_suite_name)
end
def tests_params(test_suite)
diff --git a/app/services/ci/compare_reports_base_service.rb b/app/services/ci/compare_reports_base_service.rb
index 9aba3a50ec1..ee687706b53 100644
--- a/app/services/ci/compare_reports_base_service.rb
+++ b/app/services/ci/compare_reports_base_service.rb
@@ -8,6 +8,8 @@ module Ci
# issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
class CompareReportsBaseService < ::BaseService
def execute(base_pipeline, head_pipeline)
+ return parsing_payload(base_pipeline, head_pipeline) if base_pipeline&.running?
+
base_report = get_report(base_pipeline)
head_report = get_report(head_pipeline)
comparer = build_comparer(base_report, head_report)
@@ -33,6 +35,13 @@ module Ci
protected
+ def parsing_payload(base_pipeline, head_pipeline)
+ {
+ status: :parsing,
+ key: key(base_pipeline, head_pipeline)
+ }
+ end
+
def build_comparer(base_report, head_report)
comparer_class.new(base_report, head_report)
end
diff --git a/app/services/ci/create_downstream_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index b38b3b93353..25cc9045052 100644
--- a/app/services/ci/create_downstream_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -11,6 +11,7 @@ module Ci
DuplicateDownstreamPipelineError = Class.new(StandardError)
MAX_NESTED_CHILDREN = 2
+ MAX_HIERARCHY_SIZE = 1000
def execute(bridge)
@bridge = bridge
@@ -86,6 +87,11 @@ module Ci
return false
end
+ if Feature.enabled?(:ci_limit_complete_hierarchy_size) && pipeline_tree_too_large?
+ @bridge.drop!(:reached_max_pipeline_hierarchy_size)
+ return false
+ end
+
unless can_create_downstream_pipeline?(target_ref)
@bridge.drop!(:insufficient_bridge_permissions)
return false
@@ -137,10 +143,17 @@ module Ci
return false unless @bridge.triggers_child_pipeline?
# only applies to parent-child pipelines not multi-project
- ancestors_of_new_child = @bridge.pipeline.self_and_ancestors
+ ancestors_of_new_child = @bridge.pipeline.self_and_project_ancestors
ancestors_of_new_child.count > MAX_NESTED_CHILDREN
end
+ def pipeline_tree_too_large?
+ return false unless @bridge.triggers_downstream_pipeline?
+
+ # Applies to the entire pipeline tree across all projects
+ @bridge.pipeline.complete_hierarchy_count >= MAX_HIERARCHY_SIZE
+ end
+
def config_checksum(pipeline)
[pipeline.project_id, pipeline.ref, pipeline.source].hash
end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 02f25a82307..af175b8da1c 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -23,6 +23,7 @@ module Ci
Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
Gitlab::Ci::Pipeline::Chain::SeedBlock,
Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules,
+ Gitlab::Ci::Pipeline::Chain::AssignPartition,
Gitlab::Ci::Pipeline::Chain::Seed,
Gitlab::Ci::Pipeline::Chain::Limit::Size,
Gitlab::Ci::Pipeline::Chain::Limit::Deployments,
diff --git a/app/services/ci/delete_objects_service.rb b/app/services/ci/delete_objects_service.rb
index bac99abadc9..7a93d0e9665 100644
--- a/app/services/ci/delete_objects_service.rb
+++ b/app/services/ci/delete_objects_service.rb
@@ -27,9 +27,7 @@ module Ci
# `find_by_sql` performs a write in this case and we need to wrap it in
# a transaction to stick to the primary database.
Ci::DeletedObject.transaction do
- Ci::DeletedObject.find_by_sql([
- next_batch_sql, new_pick_up_at: RETRY_IN.from_now
- ])
+ Ci::DeletedObject.find_by_sql([next_batch_sql, new_pick_up_at: RETRY_IN.from_now])
end
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/ci/expire_pipeline_cache_service.rb b/app/services/ci/expire_pipeline_cache_service.rb
index bf2355c447a..15597eb7209 100644
--- a/app/services/ci/expire_pipeline_cache_service.rb
+++ b/app/services/ci/expire_pipeline_cache_service.rb
@@ -86,7 +86,7 @@ module Ci
etag_paths << path
end
- pipeline.all_pipelines_in_hierarchy.includes(project: [:route, { namespace: :route }]).each do |relative_pipeline| # rubocop: disable CodeReuse/ActiveRecord
+ pipeline.upstream_and_all_downstreams.includes(project: [:route, { namespace: :route }]).each do |relative_pipeline| # rubocop: disable CodeReuse/ActiveRecord
etag_paths << project_pipeline_path(relative_pipeline.project, relative_pipeline)
etag_paths << graphql_pipeline_path(relative_pipeline)
etag_paths << graphql_pipeline_sha_path(relative_pipeline.sha)
diff --git a/app/services/ci/generate_coverage_reports_service.rb b/app/services/ci/generate_coverage_reports_service.rb
index 81f26e84ef8..8beecb79fd9 100644
--- a/app/services/ci/generate_coverage_reports_service.rb
+++ b/app/services/ci/generate_coverage_reports_service.rb
@@ -43,7 +43,7 @@ module Ci
end
def last_update_timestamp(pipeline_hierarchy)
- pipeline_hierarchy&.self_and_descendants&.maximum(:updated_at)
+ pipeline_hierarchy&.self_and_project_descendants&.maximum(:updated_at)
end
end
end
diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb
index af56eb221d5..3dc097a8603 100644
--- a/app/services/ci/job_artifacts/create_service.rb
+++ b/app/services/ci/job_artifacts/create_service.rb
@@ -80,7 +80,7 @@ module Ci
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact_attributes = {
- job_id: job.id,
+ job: job,
project: project,
expire_in: expire_in
}
diff --git a/app/services/ci/job_artifacts/delete_service.rb b/app/services/ci/job_artifacts/delete_service.rb
new file mode 100644
index 00000000000..65cae03312e
--- /dev/null
+++ b/app/services/ci/job_artifacts/delete_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobArtifacts
+ class DeleteService
+ include BaseServiceUtility
+
+ def initialize(build)
+ @build = build
+ end
+
+ def execute
+ if build.project.refreshing_build_artifacts_size?
+ Gitlab::ProjectStatsRefreshConflictsLogger.warn_artifact_deletion_during_stats_refresh(
+ method: 'Ci::JobArtifacts::DeleteService#execute',
+ project_id: build.project_id
+ )
+ end
+
+ # fix_expire_at is false because in this case we want to explicitly delete the job artifacts
+ # this flag is a workaround that will be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/355833
+ Ci::JobArtifacts::DestroyBatchService.new(build.job_artifacts.erasable, fix_expire_at: false).execute
+
+ ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :build
+ end
+ end
+end
diff --git a/app/services/ci/job_artifacts/track_artifact_report_service.rb b/app/services/ci/job_artifacts/track_artifact_report_service.rb
new file mode 100644
index 00000000000..1be1d98394f
--- /dev/null
+++ b/app/services/ci/job_artifacts/track_artifact_report_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobArtifacts
+ class TrackArtifactReportService
+ include Gitlab::Utils::UsageData
+
+ REPORT_TRACKED = %i[test].freeze
+
+ def execute(pipeline)
+ REPORT_TRACKED.each do |report|
+ if pipeline.complete_and_has_reports?(Ci::JobArtifact.of_report_type(report))
+ track_usage_event(event_name(report), pipeline.user_id)
+ end
+ end
+ end
+
+ def event_name(report)
+ "i_testing_#{report}_report_uploaded"
+ end
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_artifacts/coverage_report_service.rb b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
index c11a8f7a0fd..99877603554 100644
--- a/app/services/ci/pipeline_artifacts/coverage_report_service.rb
+++ b/app/services/ci/pipeline_artifacts/coverage_report_service.rb
@@ -27,12 +27,18 @@ module Ci
end
def pipeline_artifact_params
- {
+ attributes = {
pipeline: pipeline,
file_type: :code_coverage,
file: carrierwave_file,
size: carrierwave_file['tempfile'].size
}
+
+ if ::Feature.enabled?(:ci_update_unlocked_pipeline_artifacts, pipeline.project)
+ attributes[:locked] = pipeline.locked
+ end
+
+ attributes
end
def carrierwave_file
diff --git a/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb b/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb
index d6865efac9f..aeb68a75f88 100644
--- a/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb
+++ b/app/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service.rb
@@ -13,21 +13,31 @@ module Ci
return if pipeline.has_codequality_mr_diff_report?
return unless new_errors_introduced?
+ pipeline.pipeline_artifacts.create!(**artifact_attributes)
+ end
+
+ private
+
+ attr_reader :pipeline
+
+ def artifact_attributes
file = build_carrierwave_file!
- pipeline.pipeline_artifacts.create!(
+ attributes = {
project_id: pipeline.project_id,
file_type: :code_quality_mr_diff,
file_format: Ci::PipelineArtifact::REPORT_TYPES.fetch(:code_quality_mr_diff),
size: file["tempfile"].size,
file: file,
expire_at: Ci::PipelineArtifact::EXPIRATION_DATE.from_now
- )
- end
+ }
- private
+ if ::Feature.enabled?(:ci_update_unlocked_pipeline_artifacts, pipeline.project)
+ attributes[:locked] = pipeline.locked
+ end
- attr_reader :pipeline
+ attributes
+ end
def merge_requests
strong_memoize(:merge_requests) do
diff --git a/app/services/ci/pipelines/add_job_service.rb b/app/services/ci/pipelines/add_job_service.rb
index fc852bc3edd..dfbb37cf0dc 100644
--- a/app/services/ci/pipelines/add_job_service.rb
+++ b/app/services/ci/pipelines/add_job_service.rb
@@ -39,11 +39,13 @@ module Ci
job.pipeline = pipeline
job.project = pipeline.project
job.ref = pipeline.ref
+ job.partition_id = pipeline.partition_id
# update metadata since it might have been lazily initialised before this call
# metadata is present on `Ci::Processable`
if job.respond_to?(:metadata) && job.metadata
job.metadata.project = pipeline.project
+ job.metadata.partition_id = pipeline.partition_id
end
end
end
diff --git a/app/services/ci/queue/pending_builds_strategy.rb b/app/services/ci/queue/pending_builds_strategy.rb
index c8bdbba5e65..cfafe66d10b 100644
--- a/app/services/ci/queue/pending_builds_strategy.rb
+++ b/app/services/ci/queue/pending_builds_strategy.rb
@@ -19,7 +19,11 @@ module Ci
def builds_for_group_runner
return new_builds.none if runner.namespace_ids.empty?
- new_builds.where('ci_pending_builds.namespace_traversal_ids && ARRAY[?]::int[]', runner.namespace_ids)
+ new_builds_relation = new_builds.where('ci_pending_builds.namespace_traversal_ids && ARRAY[?]::int[]', runner.namespace_ids)
+
+ return order(new_builds_relation) if ::Feature.enabled?(:order_builds_for_group_runner)
+
+ new_builds_relation
end
def builds_matching_tag_ids(relation, ids)
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index b357855db12..0bd4bf8cc86 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -287,7 +287,7 @@ module Ci
Gitlab::ErrorTracking.track_exception(ex,
build_id: build.id,
build_name: build.name,
- build_stage: build.stage,
+ build_stage: build.stage_name,
pipeline_id: build.pipeline_id,
project_id: build.project_id
)
diff --git a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
index dfd97498fc8..d7078200c14 100644
--- a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
+++ b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
@@ -9,8 +9,10 @@ module Ci
free_resources = resource_group.resources.free.count
- resource_group.upcoming_processables.take(free_resources).each do |processable|
- processable.enqueue_waiting_for_resource
+ resource_group.upcoming_processables.take(free_resources).each do |upcoming|
+ Gitlab::OptimisticLocking.retry_lock(upcoming, name: 'enqueue_waiting_for_resource') do |processable|
+ processable.enqueue_waiting_for_resource
+ end
end
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/ci/runners/set_runner_associated_projects_service.rb b/app/services/ci/runners/set_runner_associated_projects_service.rb
new file mode 100644
index 00000000000..7930776749d
--- /dev/null
+++ b/app/services/ci/runners/set_runner_associated_projects_service.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+module Ci
+ module Runners
+ class SetRunnerAssociatedProjectsService
+ # @param [Ci::Runner] runner: the project runner to assign/unassign projects from
+ # @param [User] current_user: the user performing the operation
+ # @param [Array<Integer>] project_ids: the IDs of the associated projects to assign the runner to
+ def initialize(runner:, current_user:, project_ids:)
+ @runner = runner
+ @current_user = current_user
+ @project_ids = project_ids
+ end
+
+ def execute
+ unless current_user&.can?(:assign_runner, runner)
+ return ServiceResponse.error(message: 'user not allowed to assign runner', http_status: :forbidden)
+ end
+
+ return ServiceResponse.success if project_ids.blank?
+
+ set_associated_projects
+ end
+
+ private
+
+ def set_associated_projects
+ new_project_ids = [runner.owner_project.id] + project_ids
+
+ response = ServiceResponse.success
+ runner.transaction do
+ # rubocop:disable CodeReuse/ActiveRecord
+ current_project_ids = runner.projects.ids
+ # rubocop:enable CodeReuse/ActiveRecord
+
+ unless associate_new_projects(new_project_ids, current_project_ids)
+ response = ServiceResponse.error(message: 'failed to assign projects to runner')
+ raise ActiveRecord::Rollback, response.errors
+ end
+
+ unless disassociate_old_projects(new_project_ids, current_project_ids)
+ response = ServiceResponse.error(message: 'failed to destroy runner project')
+ raise ActiveRecord::Rollback, response.errors
+ end
+ end
+
+ response
+ end
+
+ def associate_new_projects(new_project_ids, current_project_ids)
+ missing_projects = Project.id_in(new_project_ids - current_project_ids)
+ missing_projects.all? { |project| runner.assign_to(project, current_user) }
+ end
+
+ def disassociate_old_projects(new_project_ids, current_project_ids)
+ projects_to_be_deleted = current_project_ids - new_project_ids
+ return true if projects_to_be_deleted.empty?
+
+ Ci::RunnerProject
+ .destroy_by(project_id: projects_to_be_deleted)
+ .all?(&:destroyed?)
+ end
+
+ attr_reader :runner, :current_user, :project_ids
+ end
+ end
+end
+
+Ci::Runners::SetRunnerAssociatedProjectsService.prepend_mod
diff --git a/app/services/ci/runners/update_runner_service.rb b/app/services/ci/runners/update_runner_service.rb
index 6cc080f81c2..bd01f52f396 100644
--- a/app/services/ci/runners/update_runner_service.rb
+++ b/app/services/ci/runners/update_runner_service.rb
@@ -9,11 +9,14 @@ module Ci
@runner = runner
end
- def update(params)
+ def execute(params)
params[:active] = !params.delete(:paused) if params.include?(:paused)
- runner.update(params).tap do |updated|
- runner.tick_runner_queue if updated
+ if runner.update(params)
+ runner.tick_runner_queue
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: runner.errors.full_messages)
end
end
end
diff --git a/app/services/ci/stuck_builds/drop_helpers.rb b/app/services/ci/stuck_builds/drop_helpers.rb
index dca50963883..f56c9aaeb55 100644
--- a/app/services/ci/stuck_builds/drop_helpers.rb
+++ b/app/services/ci/stuck_builds/drop_helpers.rb
@@ -48,7 +48,7 @@ module Ci
Gitlab::ErrorTracking.track_exception(ex,
build_id: build.id,
build_name: build.name,
- build_stage: build.stage,
+ build_stage: build.stage_name,
pipeline_id: build.pipeline_id,
project_id: build.project_id
)
diff --git a/app/services/ci/test_failure_history_service.rb b/app/services/ci/test_failure_history_service.rb
index 2214a6a2729..5a8072b2a0d 100644
--- a/app/services/ci/test_failure_history_service.rb
+++ b/app/services/ci/test_failure_history_service.rb
@@ -80,8 +80,8 @@ module Ci
end
def generate_test_suite!(build)
- # Returns an instance of Gitlab::Ci::Reports::TestSuite
- build.collect_test_reports!(Gitlab::Ci::Reports::TestReport.new)
+ test_report = build.collect_test_reports!(Gitlab::Ci::Reports::TestReport.new)
+ test_report.get_suite(build.test_suite_name)
end
def ci_unit_test_attrs(batch)
diff --git a/app/services/ci/unlock_artifacts_service.rb b/app/services/ci/unlock_artifacts_service.rb
index 30da31ba8ec..1fee31da4fc 100644
--- a/app/services/ci/unlock_artifacts_service.rb
+++ b/app/services/ci/unlock_artifacts_service.rb
@@ -7,9 +7,12 @@ module Ci
def execute(ci_ref, before_pipeline = nil)
results = {
unlocked_pipelines: 0,
- unlocked_job_artifacts: 0
+ unlocked_job_artifacts: 0,
+ unlocked_pipeline_artifacts: 0
}
+ unlock_pipeline_artifacts_enabled = ::Feature.enabled?(:ci_update_unlocked_pipeline_artifacts, ci_ref.project)
+
if ::Feature.enabled?(:ci_update_unlocked_job_artifacts, ci_ref.project)
loop do
unlocked_pipelines = []
@@ -18,6 +21,10 @@ module Ci
::Ci::Pipeline.transaction do
unlocked_pipelines = unlock_pipelines(ci_ref, before_pipeline)
unlocked_job_artifacts = unlock_job_artifacts(unlocked_pipelines)
+
+ if unlock_pipeline_artifacts_enabled
+ results[:unlocked_pipeline_artifacts] += unlock_pipeline_artifacts(unlocked_pipelines)
+ end
end
break if unlocked_pipelines.empty?
@@ -100,6 +107,14 @@ module Ci
)
end
+ # rubocop:disable CodeReuse/ActiveRecord
+ def unlock_pipeline_artifacts(pipelines)
+ return 0 if pipelines.empty?
+
+ ::Ci::PipelineArtifact.where(pipeline_id: pipelines.rows.flatten).update_all(locked: :unlocked)
+ end
+ # rubocop:enable CodeReuse/ActiveRecord
+
def unlock_pipelines(ci_ref, before_pipeline)
::Ci::Pipeline.connection.exec_query(unlock_pipelines_query(ci_ref, before_pipeline))
end
diff --git a/app/services/commits/create_service.rb b/app/services/commits/create_service.rb
index fc18420f6e4..a498d39d34e 100644
--- a/app/services/commits/create_service.rb
+++ b/app/services/commits/create_service.rb
@@ -66,7 +66,7 @@ module Commits
validate_on_branch!
validate_branch_existence!
- validate_new_branch_name! if different_branch?
+ validate_new_branch_name! if project.empty_repo? || different_branch?
end
def validate_permissions!
diff --git a/app/services/concerns/alert_management/alert_processing.rb b/app/services/concerns/alert_management/alert_processing.rb
index 8c6c7b15d28..9fe82507edd 100644
--- a/app/services/concerns/alert_management/alert_processing.rb
+++ b/app/services/concerns/alert_management/alert_processing.rb
@@ -113,7 +113,7 @@ module AlertManagement
end
def resolving_alert?
- incoming_payload.ends_at.present?
+ incoming_payload.resolved?
end
def notifying_alert?
@@ -121,7 +121,7 @@ module AlertManagement
end
def alert_source
- incoming_payload.monitoring_tool
+ incoming_payload.source
end
def logger
diff --git a/app/services/concerns/ci/downstream_pipeline_helpers.rb b/app/services/concerns/ci/downstream_pipeline_helpers.rb
index 39c0adb6e4e..26d7eb97151 100644
--- a/app/services/concerns/ci/downstream_pipeline_helpers.rb
+++ b/app/services/concerns/ci/downstream_pipeline_helpers.rb
@@ -5,7 +5,6 @@ module Ci
def log_downstream_pipeline_creation(downstream_pipeline)
return unless downstream_pipeline&.persisted?
- hierarchy_size = downstream_pipeline.all_pipelines_in_hierarchy.count
root_pipeline = downstream_pipeline.upstream_root
::Gitlab::AppLogger.info(
@@ -14,7 +13,7 @@ module Ci
root_pipeline_id: root_pipeline.id,
downstream_pipeline_id: downstream_pipeline.id,
downstream_pipeline_relationship: downstream_pipeline.parent_pipeline? ? :parent_child : :multi_project,
- hierarchy_size: hierarchy_size,
+ hierarchy_size: downstream_pipeline.complete_hierarchy_count,
root_pipeline_plan: root_pipeline.project.actual_plan_name,
root_pipeline_namespace_path: root_pipeline.project.namespace.full_path,
root_pipeline_project_path: root_pipeline.project.full_path
diff --git a/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb b/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb
index 23053975313..427aebf397e 100644
--- a/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb
+++ b/app/services/concerns/ci/job_token_scope/edit_scope_validations.rb
@@ -9,10 +9,6 @@ module Ci
"not exist or you don't have permission to perform this action"
def validate_edit!(source_project, target_project, current_user)
- unless source_project.ci_job_token_scope_enabled?
- raise ValidationError, "Job token scope is disabled for this project"
- end
-
unless can?(current_user, :admin_project, source_project)
raise ValidationError, "Insufficient permissions to modify the job token scope"
end
diff --git a/app/services/concerns/projects/container_repository/gitlab/timeoutable.rb b/app/services/concerns/projects/container_repository/gitlab/timeoutable.rb
new file mode 100644
index 00000000000..095f5aa7cfa
--- /dev/null
+++ b/app/services/concerns/projects/container_repository/gitlab/timeoutable.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Projects
+ module ContainerRepository
+ module Gitlab
+ module Timeoutable
+ extend ActiveSupport::Concern
+
+ DISABLED_TIMEOUTS = [nil, 0].freeze
+
+ TimeoutError = Class.new(StandardError)
+
+ private
+
+ def timeout?(start_time)
+ return false if service_timeout.in?(DISABLED_TIMEOUTS)
+
+ (Time.zone.now - start_time) > service_timeout
+ end
+
+ def service_timeout
+ ::Gitlab::CurrentSettings.current_application_settings.container_registry_delete_tags_service_timeout
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/container_expiration_policies/cleanup_service.rb b/app/services/container_expiration_policies/cleanup_service.rb
index 34889e58127..1123b29f217 100644
--- a/app/services/container_expiration_policies/cleanup_service.rb
+++ b/app/services/container_expiration_policies/cleanup_service.rb
@@ -24,7 +24,7 @@ module ContainerExpirationPolicies
begin
service_result = Projects::ContainerRepository::CleanupTagsService
- .new(repository, nil, policy_params.merge('container_expiration_policy' => true))
+ .new(container_repository: repository, params: policy_params.merge('container_expiration_policy' => true))
.execute
rescue StandardError
repository.cleanup_unfinished!
diff --git a/app/services/deployments/update_environment_service.rb b/app/services/deployments/update_environment_service.rb
index 3cacedc7d6e..90a31ae9370 100644
--- a/app/services/deployments/update_environment_service.rb
+++ b/app/services/deployments/update_environment_service.rb
@@ -61,6 +61,12 @@ module Deployments
ExpandVariables.expand(environment_url, -> { variables.sort_and_expand_all })
end
+ def expanded_auto_stop_in
+ return unless auto_stop_in
+
+ ExpandVariables.expand(auto_stop_in, -> { variables.sort_and_expand_all })
+ end
+
def environment_url
environment_options[:url]
end
@@ -69,6 +75,10 @@ module Deployments
environment_options[:action] || 'start'
end
+ def auto_stop_in
+ deployable&.environment_auto_stop_in
+ end
+
def renew_external_url
if (url = expanded_environment_url)
environment.external_url = url
@@ -78,7 +88,9 @@ module Deployments
def renew_auto_stop_in
return unless deployable
- environment.auto_stop_in = deployable.environment_auto_stop_in
+ if (value = expanded_auto_stop_in)
+ environment.auto_stop_in = value
+ end
end
def renew_deployment_tier
diff --git a/app/services/design_management/copy_design_collection/copy_service.rb b/app/services/design_management/copy_design_collection/copy_service.rb
index 886077191ab..3bc30f62a81 100644
--- a/app/services/design_management/copy_design_collection/copy_service.rb
+++ b/app/services/design_management/copy_design_collection/copy_service.rb
@@ -143,7 +143,7 @@ module DesignManagement
gitaly_actions = version.actions.map do |action|
design = action.design
# Map the raw Action#event enum value to a Gitaly "action" for the
- # `Repository#multi_action` call.
+ # `Repository#commit_files` call.
gitaly_action_name = @event_enum_map[action.event_before_type_cast]
# `content` will be the LfsPointer file and not the design file,
# and can be nil for deletions.
@@ -157,7 +157,7 @@ module DesignManagement
}.compact
end
- sha = target_repository.multi_action(
+ sha = target_repository.commit_files(
git_user,
branch_name: temporary_branch,
message: commit_message(version),
diff --git a/app/services/design_management/delete_designs_service.rb b/app/services/design_management/delete_designs_service.rb
index 9ed03a994c4..921c904d8de 100644
--- a/app/services/design_management/delete_designs_service.rb
+++ b/app/services/design_management/delete_designs_service.rb
@@ -16,7 +16,8 @@ module DesignManagement
version = delete_designs!
EventCreateService.new.destroy_designs(designs, current_user)
- Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_removed_action(author: current_user)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_removed_action(author: current_user,
+ project: project)
TodosDestroyer::DestroyedDesignsWorker.perform_async(designs.map(&:id))
success(version: version)
diff --git a/app/services/design_management/runs_design_actions.rb b/app/services/design_management/runs_design_actions.rb
index ee6aa9286d3..267ed6bf29f 100644
--- a/app/services/design_management/runs_design_actions.rb
+++ b/app/services/design_management/runs_design_actions.rb
@@ -15,7 +15,7 @@ module DesignManagement
def run_actions(actions, skip_system_notes: false)
raise NoActions if actions.empty?
- sha = repository.multi_action(current_user,
+ sha = repository.commit_files(current_user,
branch_name: target_branch,
message: commit_message,
actions: actions.map(&:gitaly_action))
diff --git a/app/services/design_management/save_designs_service.rb b/app/services/design_management/save_designs_service.rb
index a1fce45434b..64537293e65 100644
--- a/app/services/design_management/save_designs_service.rb
+++ b/app/services/design_management/save_designs_service.rb
@@ -131,9 +131,11 @@ module DesignManagement
def track_usage_metrics(action)
if action == :update
- ::Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_modified_action(author: current_user)
+ ::Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_modified_action(author: current_user,
+ project: project)
else
- ::Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_added_action(author: current_user)
+ ::Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_added_action(author: current_user,
+ project: project)
end
::Gitlab::UsageDataCounters::DesignsCounter.count(action)
diff --git a/app/services/environments/stop_service.rb b/app/services/environments/stop_service.rb
index 75c878c9350..774e3ffe273 100644
--- a/app/services/environments/stop_service.rb
+++ b/app/services/environments/stop_service.rb
@@ -25,8 +25,19 @@ module Environments
def execute_for_merge_request_pipeline(merge_request)
return unless merge_request.actual_head_pipeline&.merge_request?
- merge_request.environments_in_head_pipeline(deployment_status: :success).each do |environment|
- execute(environment)
+ created_environments = merge_request.created_environments
+
+ if created_environments.any?
+ created_environments.each { |env| execute(env) }
+ else
+ environments_in_head_pipeline = merge_request.environments_in_head_pipeline(deployment_status: :success)
+ environments_in_head_pipeline.each { |env| execute(env) }
+
+ if environments_in_head_pipeline.any?
+ # If we don't see a message often, we'd be able to remove this path. (or likely in GitLab 16.0)
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/372965
+ Gitlab::AppJsonLogger.info(message: 'Running legacy dynamic environment stop logic', project_id: project.id)
+ end
end
end
diff --git a/app/services/files/multi_service.rb b/app/services/files/multi_service.rb
index 65af4dd5a28..dd09ecafb4f 100644
--- a/app/services/files/multi_service.rb
+++ b/app/services/files/multi_service.rb
@@ -38,7 +38,7 @@ module Files
end
def commit_actions!(actions)
- repository.multi_action(
+ repository.commit_files(
current_user,
message: @commit_message,
branch_name: @branch_name,
diff --git a/app/services/google_cloud/create_cloudsql_instance_service.rb b/app/services/google_cloud/create_cloudsql_instance_service.rb
index f7fca277c52..8d040c6c908 100644
--- a/app/services/google_cloud/create_cloudsql_instance_service.rb
+++ b/app/services/google_cloud/create_cloudsql_instance_service.rb
@@ -11,7 +11,7 @@ module GoogleCloud
trigger_instance_setup_worker
success
rescue Google::Apis::Error => err
- error(err.to_json)
+ error(err.message)
end
private
diff --git a/app/services/google_cloud/enable_cloudsql_service.rb b/app/services/google_cloud/enable_cloudsql_service.rb
index a466b2f3696..e4a411d0fab 100644
--- a/app/services/google_cloud/enable_cloudsql_service.rb
+++ b/app/services/google_cloud/enable_cloudsql_service.rb
@@ -12,6 +12,8 @@ module GoogleCloud
end
success({ gcp_project_ids: unique_gcp_project_ids })
+ rescue Google::Apis::Error => err
+ error(err.message)
end
private
diff --git a/app/services/google_cloud/fetch_google_ip_list_service.rb b/app/services/google_cloud/fetch_google_ip_list_service.rb
new file mode 100644
index 00000000000..f7739971603
--- /dev/null
+++ b/app/services/google_cloud/fetch_google_ip_list_service.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module GoogleCloud
+ class FetchGoogleIpListService
+ include BaseServiceUtility
+
+ GOOGLE_IP_RANGES_URL = 'https://www.gstatic.com/ipranges/cloud.json'
+ RESPONSE_BODY_LIMIT = 1.megabyte
+ EXPECTED_CONTENT_TYPE = 'application/json'
+
+ IpListNotRetrievedError = Class.new(StandardError)
+
+ def execute
+ # Prevent too many workers from hitting the same HTTP endpoint
+ if ::Gitlab::ApplicationRateLimiter.throttled?(:fetch_google_ip_list, scope: nil)
+ return error("#{self.class} was rate limited")
+ end
+
+ subnets = fetch_and_update_cache!
+
+ Gitlab::AppJsonLogger.info(class: self.class.name,
+ message: 'Successfully retrieved Google IP list',
+ subnet_count: subnets.count)
+
+ success({ subnets: subnets })
+ rescue IpListNotRetrievedError => err
+ Gitlab::ErrorTracking.log_exception(err)
+ error('Google IP list not retrieved')
+ end
+
+ private
+
+ # Attempts to retrieve and parse the list of IPs from Google. Updates
+ # the internal cache so that the data is accessible.
+ #
+ # Returns an array of IPAddr objects consisting of subnets.
+ def fetch_and_update_cache!
+ parsed_response = fetch_google_ip_list
+
+ parse_google_prefixes(parsed_response).tap do |subnets|
+ ::ObjectStorage::CDN::GoogleIpCache.update!(subnets)
+ end
+ end
+
+ def fetch_google_ip_list
+ response = Gitlab::HTTP.get(GOOGLE_IP_RANGES_URL, follow_redirects: false, allow_local_requests: false)
+
+ validate_response!(response)
+
+ response.parsed_response
+ end
+
+ def validate_response!(response)
+ raise IpListNotRetrievedError, "response was #{response.code}" unless response.code == 200
+ raise IpListNotRetrievedError, "response was nil" unless response.body
+
+ parsed_response = response.parsed_response
+
+ unless response.content_type == EXPECTED_CONTENT_TYPE && parsed_response.is_a?(Hash)
+ raise IpListNotRetrievedError, "response was not JSON"
+ end
+
+ if response.body&.bytesize.to_i > RESPONSE_BODY_LIMIT
+ raise IpListNotRetrievedError, "response was too large: #{response.body.bytesize}"
+ end
+
+ prefixes = parsed_response['prefixes']
+
+ raise IpListNotRetrievedError, "JSON was type #{prefixes.class}, expected Array" unless prefixes.is_a?(Array)
+ raise IpListNotRetrievedError, "#{GOOGLE_IP_RANGES_URL} did not return any IP ranges" if prefixes.empty?
+
+ response.parsed_response
+ end
+
+ def parse_google_prefixes(parsed_response)
+ ranges = parsed_response['prefixes'].map do |prefix|
+ ip_range = prefix['ipv4Prefix'] || prefix['ipv6Prefix']
+
+ next unless ip_range
+
+ IPAddr.new(ip_range)
+ end.compact
+
+ raise IpListNotRetrievedError, "#{GOOGLE_IP_RANGES_URL} did not return any IP ranges" if ranges.empty?
+
+ ranges
+ end
+ end
+end
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index 35716f7742a..d508865ef32 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -39,7 +39,7 @@ module Groups
if @group.save
@group.add_owner(current_user)
Integration.create_from_active_default_integrations(@group, :group_id)
- OnboardingProgress.onboard(@group)
+ Onboarding::Progress.onboard(@group)
end
end
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index ff5d5d2c4c1..53297d2412c 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -50,7 +50,7 @@ module Import
end
def project_name
- @project_name ||= params[:new_name].presence || repo.name
+ @project_name ||= params[:new_name].presence || repo[:name]
end
def namespace_path
@@ -66,13 +66,13 @@ module Import
end
def oversized?
- repository_size_limit > 0 && repo.size > repository_size_limit
+ repository_size_limit > 0 && repo[:size] > repository_size_limit
end
def oversize_error_message
_('"%{repository_name}" size (%{repository_size}) is larger than the limit of %{limit}.') % {
- repository_name: repo.name,
- repository_size: number_to_human_size(repo.size),
+ repository_name: repo[:name],
+ repository_size: number_to_human_size(repo[:size]),
limit: number_to_human_size(repository_size_limit)
}
end
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index acd6d45af7a..70ad97f8436 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -285,7 +285,7 @@ class IssuableBaseService < ::BaseProjectService
if issuable.changed? || params.present? || widget_params.present?
issuable.assign_attributes(allowed_update_params(params))
- if has_title_or_description_changed?(issuable)
+ if issuable.description_changed?
issuable.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
end
@@ -398,10 +398,6 @@ class IssuableBaseService < ::BaseProjectService
update_task(issuable)
end
- def has_title_or_description_changed?(issuable)
- issuable.title_changed? || issuable.description_changed?
- end
-
def change_additional_attributes(issuable)
change_state(issuable)
change_subscription(issuable)
diff --git a/app/services/issuable_links/create_service.rb b/app/services/issuable_links/create_service.rb
index aca98596a02..2e9775af8c2 100644
--- a/app/services/issuable_links/create_service.rb
+++ b/app/services/issuable_links/create_service.rb
@@ -41,7 +41,7 @@ module IssuableLinks
set_link_type(link)
if link.changed? && link.save
- create_notes(referenced_issuable)
+ create_notes(link)
end
link
@@ -124,9 +124,9 @@ module IssuableLinks
:issue
end
- def create_notes(referenced_issuable)
- SystemNoteService.relate_issuable(issuable, referenced_issuable, current_user)
- SystemNoteService.relate_issuable(referenced_issuable, issuable, current_user)
+ def create_notes(issuable_link)
+ SystemNoteService.relate_issuable(issuable_link.source, issuable_link.target, current_user)
+ SystemNoteService.relate_issuable(issuable_link.target, issuable_link.source, current_user)
end
def linkable_issuables(objects)
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 61a95e49228..d75e74f3b19 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -28,9 +28,6 @@ module Issues
return if issue.relative_position.nil?
return if NO_REBALANCING_NEEDED.cover?(issue.relative_position)
- gates = [issue.project, issue.project.group].compact
- return unless gates.any? { |gate| Feature.enabled?(:rebalance_issues, gate) }
-
Issues::RebalancingWorker.perform_async(nil, *issue.project.self_or_root_group_ids)
end
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index d08e4d12a92..da888386e0a 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -24,7 +24,7 @@ module Issues
return issue
end
- if perform_close(issue)
+ if issue.close(current_user)
event_service.close_issue(issue, current_user)
create_note(issue, closed_via) if system_note
@@ -40,7 +40,7 @@ module Issues
if closed_via.is_a?(MergeRequest)
store_first_mentioned_in_commit_at(issue, closed_via)
- OnboardingProgressService.new(project.namespace).execute(action: :issue_auto_closed)
+ Onboarding::ProgressService.new(project.namespace).execute(action: :issue_auto_closed)
end
delete_milestone_closed_issue_counter_cache(issue.milestone)
@@ -51,11 +51,6 @@ module Issues
private
- # Overridden on EE
- def perform_close(issue)
- issue.close(current_user)
- end
-
def can_close?(issue, skip_authorization: false)
skip_authorization || can?(current_user, :update_issue, issue) || issue.is_a?(ExternalIssue)
end
diff --git a/app/services/issues/export_csv_service.rb b/app/services/issues/export_csv_service.rb
index 6209127bd86..46e4b865dc3 100644
--- a/app/services/issues/export_csv_service.rb
+++ b/app/services/issues/export_csv_service.rb
@@ -5,20 +5,20 @@ module Issues
include Gitlab::Routing.url_helpers
include GitlabRoutingHelper
- def initialize(issuables_relation, project)
- super
+ def initialize(issuables_relation, project, user = nil)
+ super(issuables_relation, project)
@labels = @issuables.labels_hash.transform_values { |labels| labels.sort.join(',').presence }
end
- def email(user)
- Notify.issues_csv_email(user, project, csv_data, csv_builder.status).deliver_now
+ def email(mail_to_user)
+ Notify.issues_csv_email(mail_to_user, project, csv_data, csv_builder.status).deliver_now
end
private
def associations_to_preload
- %i(author assignees timelogs milestone project)
+ [:author, :assignees, :timelogs, :milestone, { project: { namespace: :route } }]
end
def header_to_value_hash
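The new preload list nests { project: { namespace: :route } } so the namespace routes needed to build issue URLs in the CSV are loaded up front instead of once per row. A generic ActiveRecord sketch of the difference, meant for a Rails console in this codebase with a project already in scope (the query shape is illustrative, not the CsvBuilder internals):

# N+1: every row loads project, namespace and route separately.
Issue.where(project_id: project.id).find_each do |issue|
  issue.project.namespace.route.path
end

# Preloaded: the nested hash mirrors associations_to_preload above.
Issue.where(project_id: project.id)
     .includes(:author, :assignees, :timelogs, :milestone, { project: { namespace: :route } })
     .find_each { |issue| issue.project.namespace.route.path }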
diff --git a/app/services/issues/relative_position_rebalancing_service.rb b/app/services/issues/relative_position_rebalancing_service.rb
index 23bb409f3cd..b5c10430e83 100644
--- a/app/services/issues/relative_position_rebalancing_service.rb
+++ b/app/services/issues/relative_position_rebalancing_service.rb
@@ -16,8 +16,6 @@ module Issues
end
def execute
- return unless Feature.enabled?(:rebalance_issues, root_namespace)
-
# Given can_start_rebalance? and track_new_running_rebalance are not atomic
# it can happen that we end up with more than Rebalancing::State::MAX_NUMBER_OF_CONCURRENT_REBALANCES running.
# Considering the number of allowed Rebalancing::State::MAX_NUMBER_OF_CONCURRENT_REBALANCES is small we should be ok,
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index e003ecacb3f..f4f81e9455a 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -5,7 +5,7 @@ module Issues
def execute(issue, skip_authorization: false)
return issue unless can_reopen?(issue, skip_authorization: skip_authorization)
- if perform_reopen(issue)
+ if issue.reopen
event_service.reopen_issue(issue, current_user)
create_note(issue, 'reopened')
notification_service.async.reopen_issue(issue, current_user)
@@ -22,11 +22,6 @@ module Issues
private
- # Overriden on EE
- def perform_reopen(issue)
- issue.reopen
- end
-
def can_reopen?(issue, skip_authorization: false)
skip_authorization || can?(current_user, :reopen_issue, issue)
end
diff --git a/app/services/labels/transfer_service.rb b/app/services/labels/transfer_service.rb
index 67163cb8122..a79e5b00232 100644
--- a/app/services/labels/transfer_service.rb
+++ b/app/services/labels/transfer_service.rb
@@ -40,9 +40,9 @@ module Labels
def labels_to_transfer
Label
.from_union([
- group_labels_applied_to_issues,
- group_labels_applied_to_merge_requests
- ])
+ group_labels_applied_to_issues,
+ group_labels_applied_to_merge_requests
+ ])
.reorder(nil)
.distinct
end
diff --git a/app/services/members/update_service.rb b/app/services/members/update_service.rb
index b4d1b80e5a3..8ef3e307519 100644
--- a/app/services/members/update_service.rb
+++ b/app/services/members/update_service.rb
@@ -7,6 +7,8 @@ module Members
raise Gitlab::Access::AccessDeniedError unless can?(current_user, action_member_permission(permission, member), member)
raise Gitlab::Access::AccessDeniedError if prevent_upgrade_to_owner?(member) || prevent_downgrade_from_owner?(member)
+ return success(member: member) if update_results_in_no_change?(member)
+
old_access_level = member.human_access
old_expiry = member.expires_at
@@ -26,6 +28,13 @@ module Members
private
+ def update_results_in_no_change?(member)
+ return false if params[:expires_at]&.to_date != member.expires_at
+ return false if params[:access_level] != member.access_level
+
+ true
+ end
+
def downgrading_to_guest?
params[:access_level] == Gitlab::Access::GUEST
end
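update_results_in_no_change? makes repeated identical update calls a no-op: the service returns success before touching expiry history or notifications when both the requested access level and expiry already match the member. A standalone sketch of the same predicate, with a Struct standing in for the Member model and ActiveSupport providing String#to_date:

require 'date'
require 'active_support/core_ext/string/conversions'

Member = Struct.new(:access_level, :expires_at, keyword_init: true)

def no_change?(member, params)
  return false if params[:expires_at]&.to_date != member.expires_at
  return false if params[:access_level] != member.access_level

  true
end

member = Member.new(access_level: 30, expires_at: Date.new(2022, 12, 31))

no_change?(member, access_level: 30, expires_at: '2022-12-31') # => true
no_change?(member, access_level: 40, expires_at: '2022-12-31') # => false (role changed)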
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index 93a0d375b97..9d12eb80eb6 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -28,7 +28,7 @@ module MergeRequests
merge_request.diffs(include_stats: false).write_cache
merge_request.create_cross_references!(current_user)
- OnboardingProgressService.new(merge_request.target_project.namespace).execute(action: :merge_request_created)
+ Onboarding::ProgressService.new(merge_request.target_project.namespace).execute(action: :merge_request_created)
todo_service.new_merge_request(merge_request, current_user)
merge_request.cache_merge_request_closes_issues!(current_user)
diff --git a/app/services/merge_requests/approval_service.rb b/app/services/merge_requests/approval_service.rb
index dcc4cf4bb1e..64ae33c9b15 100644
--- a/app/services/merge_requests/approval_service.rb
+++ b/app/services/merge_requests/approval_service.rb
@@ -17,19 +17,11 @@ module MergeRequests
# utilizing the `Gitlab::EventStore`.
#
# Workers can subscribe to the `MergeRequests::ApprovedEvent`.
- if Feature.enabled?(:async_after_approval, project)
- Gitlab::EventStore.publish(
- MergeRequests::ApprovedEvent.new(
- data: { current_user_id: current_user.id, merge_request_id: merge_request.id }
- )
+ Gitlab::EventStore.publish(
+ MergeRequests::ApprovedEvent.new(
+ data: { current_user_id: current_user.id, merge_request_id: merge_request.id }
)
- else
- create_event(merge_request)
- stream_audit_event(merge_request)
- create_approval_note(merge_request)
- mark_pending_todos_as_done(merge_request)
- execute_approval_hooks(merge_request, current_user)
- end
+ )
success
end
@@ -37,7 +29,7 @@ module MergeRequests
private
def can_be_approved?(merge_request)
- current_user.can?(:approve_merge_request, merge_request)
+ merge_request.can_be_approved_by?(current_user)
end
def save_approval(approval)
@@ -49,29 +41,6 @@ module MergeRequests
def reset_approvals_cache(merge_request)
merge_request.approvals.reset
end
-
- def create_event(merge_request)
- event_service.approve_mr(merge_request, current_user)
- end
-
- def stream_audit_event(merge_request)
- # Defined in EE
- end
-
- def create_approval_note(merge_request)
- SystemNoteService.approve_mr(merge_request, current_user)
- end
-
- def mark_pending_todos_as_done(merge_request)
- todo_service.resolve_todos_for_target(merge_request, current_user)
- end
-
- def execute_approval_hooks(merge_request, current_user)
- # Only one approval is required for a merge request to be approved
- notification_service.async.approve_mr(merge_request, current_user)
-
- execute_hooks(merge_request, 'approved')
- end
end
end
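With the async_after_approval flag removed, the side effects that used to run inline (event, system note, todo resolution, notification hooks) are expected to happen in workers subscribed to MergeRequests::ApprovedEvent. A hedged sketch of what a subscriber can look like; the worker name and its body are hypothetical, only the event class, the data keys and SystemNoteService.approve_mr come from this diff:

# Wiring happens in the event store configuration, roughly:
#   store.subscribe ::MergeRequests::ApprovalNoteWorker, to: ::MergeRequests::ApprovedEvent
module MergeRequests
  class ApprovalNoteWorker # hypothetical subscriber
    include ApplicationWorker
    include Gitlab::EventStore::Subscriber

    def handle_event(event)
      merge_request = MergeRequest.find_by_id(event.data[:merge_request_id])
      user = User.find_by_id(event.data[:current_user_id])
      return unless merge_request && user

      SystemNoteService.approve_mr(merge_request, user)
    end
  end
end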
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index bda8dc64ac0..6cefd9169f5 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -43,8 +43,6 @@ module MergeRequests
end
def handle_assignees_change(merge_request, old_assignees)
- bulk_update_assignees_state(merge_request, merge_request.assignees - old_assignees)
-
MergeRequests::HandleAssigneesChangeService
.new(project: project, current_user: current_user)
.async_execute(merge_request, old_assignees)
@@ -60,7 +58,6 @@ module MergeRequests
new_reviewers = merge_request.reviewers - old_reviewers
merge_request_activity_counter.track_users_review_requested(users: new_reviewers)
merge_request_activity_counter.track_reviewers_changed_action(user: current_user)
- bulk_update_reviewers_state(merge_request, new_reviewers)
end
def cleanup_environments(merge_request)
@@ -247,46 +244,6 @@ module MergeRequests
Milestones::MergeRequestsCountService.new(milestone).delete_cache
end
-
- def bulk_update_assignees_state(merge_request, new_assignees)
- return unless current_user.mr_attention_requests_enabled?
- return if new_assignees.empty?
-
- assignees_map = merge_request.merge_request_assignees_with(new_assignees).to_h do |assignee|
- state = if assignee.user_id == current_user&.id
- :unreviewed
- else
- merge_request.find_reviewer(assignee.assignee)&.state || :attention_requested
- end
-
- [
- assignee,
- { state: MergeRequestAssignee.states[state], updated_state_by_user_id: current_user.id }
- ]
- end
-
- ::Gitlab::Database::BulkUpdate.execute(%i[state updated_state_by_user_id], assignees_map)
- end
-
- def bulk_update_reviewers_state(merge_request, new_reviewers)
- return unless current_user.mr_attention_requests_enabled?
- return if new_reviewers.empty?
-
- reviewers_map = merge_request.merge_request_reviewers_with(new_reviewers).to_h do |reviewer|
- state = if reviewer.user_id == current_user&.id
- :unreviewed
- else
- merge_request.find_assignee(reviewer.reviewer)&.state || :attention_requested
- end
-
- [
- reviewer,
- { state: MergeRequestReviewer.states[state], updated_state_by_user_id: current_user.id }
- ]
- end
-
- ::Gitlab::Database::BulkUpdate.execute(%i[state updated_state_by_user_id], reviewers_map)
- end
end
end
diff --git a/app/services/merge_requests/ff_merge_service.rb b/app/services/merge_requests/ff_merge_service.rb
index c5640047899..6e1d1b6ad23 100644
--- a/app/services/merge_requests/ff_merge_service.rb
+++ b/app/services/merge_requests/ff_merge_service.rb
@@ -8,26 +8,22 @@ module MergeRequests
# Executed when you do fast-forward merge via GitLab UI
#
class FfMergeService < MergeRequests::MergeService
- private
+ extend ::Gitlab::Utils::Override
- def commit
- ff_merge = repository.ff_merge(current_user,
- source,
- merge_request.target_branch,
- merge_request: merge_request)
+ private
- if merge_request.squash_on_merge?
- merge_request.update_column(:squash_commit_sha, merge_request.in_progress_merge_commit_sha)
- end
+ override :execute_git_merge
+ def execute_git_merge
+ repository.ff_merge(current_user,
+ source,
+ merge_request.target_branch,
+ merge_request: merge_request)
+ end
- ff_merge
- rescue Gitlab::Git::PreReceiveError => e
- Gitlab::ErrorTracking.track_exception(e, pre_receive_message: e.raw_message, merge_request_id: merge_request&.id)
- raise MergeError, e.message
- rescue StandardError => e
- raise MergeError, "Something went wrong during merge: #{e.message}"
- ensure
- merge_request.update_and_mark_in_progress_merge_commit_sha(nil)
+ override :merge_success_data
+ def merge_success_data(commit_id)
+ # There is no merge commit to update, so this is just blank.
+ {}
end
end
end
diff --git a/app/services/merge_requests/handle_assignees_change_service.rb b/app/services/merge_requests/handle_assignees_change_service.rb
index 87cd6544406..51be4690af4 100644
--- a/app/services/merge_requests/handle_assignees_change_service.rb
+++ b/app/services/merge_requests/handle_assignees_change_service.rb
@@ -21,7 +21,7 @@ module MergeRequests
merge_request_activity_counter.track_users_assigned_to_mr(users: new_assignees)
merge_request_activity_counter.track_assignees_changed_action(user: current_user)
- execute_assignees_hooks(merge_request, old_assignees) if options[:execute_hooks]
+ execute_assignees_hooks(merge_request, old_assignees) if options['execute_hooks']
end
private
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index f51923b7035..6d31a29f5a7 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -92,15 +92,26 @@ module MergeRequests
raise_error(GENERIC_ERROR_MESSAGE)
end
- merge_request.update!(merge_commit_sha: commit_id)
+ update_merge_sha_metadata(commit_id)
+
+ commit_id
ensure
merge_request.update_and_mark_in_progress_merge_commit_sha(nil)
end
+ def update_merge_sha_metadata(commit_id)
+ data_to_update = merge_success_data(commit_id)
+ data_to_update[:squash_commit_sha] = source if merge_request.squash_on_merge?
+
+ merge_request.update!(**data_to_update) if data_to_update.present?
+ end
+
+ def merge_success_data(commit_id)
+ { merge_commit_sha: commit_id }
+ end
+
def try_merge
- repository.merge(current_user, source, merge_request, commit_message).tap do
- merge_request.update_column(:squash_commit_sha, source) if merge_request.squash_on_merge?
- end
+ execute_git_merge
rescue Gitlab::Git::PreReceiveError => e
raise MergeError,
"Something went wrong during merge pre-receive hook. #{e.message}".strip
@@ -109,6 +120,10 @@ module MergeRequests
raise_error(GENERIC_ERROR_MESSAGE)
end
+ def execute_git_merge
+ repository.merge(current_user, source, merge_request, commit_message)
+ end
+
def after_merge
log_info("Post merge started on JID #{merge_jid} with state #{state}")
MergeRequests::PostMergeService.new(project: project, current_user: current_user).execute(merge_request)
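try_merge now calls a single execute_git_merge hook and update_merge_sha_metadata builds its update from merge_success_data, which is exactly what FfMergeService (earlier in this diff) overrides: a fast-forward merge produces no merge commit, so it swaps in a repository.ff_merge call and an empty metadata hash. A standalone toy showing the same template-method shape, with puts standing in for the database update:

class BaseMerge
  def commit
    commit_id = execute_git_merge
    data = merge_success_data(commit_id)
    puts "recording #{data}" unless data.empty?
    commit_id
  end

  private

  def execute_git_merge
    'merge-commit-sha'
  end

  def merge_success_data(commit_id)
    { merge_commit_sha: commit_id }
  end
end

class FastForwardMerge < BaseMerge
  private

  def execute_git_merge
    'new-head-sha'
  end

  # No merge commit to record for fast-forward merges.
  def merge_success_data(_commit_id)
    {}
  end
end

BaseMerge.new.commit        # prints the metadata hash, returns "merge-commit-sha"
FastForwardMerge.new.commit # prints nothing, returns "new-head-sha"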
diff --git a/app/services/merge_requests/mergeability/detailed_merge_status_service.rb b/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
new file mode 100644
index 00000000000..d25234183fd
--- /dev/null
+++ b/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ module Mergeability
+ class DetailedMergeStatusService
+ include ::Gitlab::Utils::StrongMemoize
+
+ def initialize(merge_request:)
+ @merge_request = merge_request
+ end
+
+ def execute
+ return :checking if checking?
+ return :unchecked if unchecked?
+
+ if check_results.success?
+
+ # If everything else is mergeable, but CI is not, the frontend expects two potential states to be returned
+ # See discussion: gitlab.com/gitlab-org/gitlab/-/merge_requests/96778#note_1093063523
+ if check_ci_results.success?
+ :mergeable
+ else
+ ci_check_failure_reason
+ end
+ else
+ check_results.failure_reason
+ end
+ end
+
+ private
+
+ attr_reader :merge_request, :checks, :ci_check
+
+ def checking?
+ merge_request.cannot_be_merged_rechecking? || merge_request.preparing? || merge_request.checking?
+ end
+
+ def unchecked?
+ merge_request.unchecked?
+ end
+
+ def check_results
+ strong_memoize(:check_results) do
+ merge_request.execute_merge_checks(params: { skip_ci_check: true })
+ end
+ end
+
+ def check_ci_results
+ strong_memoize(:check_ci_results) do
+ ::MergeRequests::Mergeability::CheckCiStatusService.new(merge_request: merge_request, params: {}).execute
+ end
+ end
+
+ def ci_check_failure_reason
+ if merge_request.actual_head_pipeline&.running?
+ :ci_still_running
+ else
+ check_ci_results.payload.fetch(:reason)
+ end
+ end
+ end
+ end
+end
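The new service collapses the merge widget's state into one symbol, falling back to the CI-specific reasons only once every other check passes. A usage sketch, assuming a Rails console in this codebase; the merge request id is illustrative:

merge_request = MergeRequest.find(1)

status = MergeRequests::Mergeability::DetailedMergeStatusService
  .new(merge_request: merge_request)
  .execute

# One of :checking, :unchecked, :mergeable, :ci_still_running,
# or a failure reason symbol coming from the mergeability checks.
puts status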
diff --git a/app/services/merge_requests/mergeability/logger.rb b/app/services/merge_requests/mergeability/logger.rb
new file mode 100644
index 00000000000..8b45d231e03
--- /dev/null
+++ b/app/services/merge_requests/mergeability/logger.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ module Mergeability
+ class Logger
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(merge_request:, destination: Gitlab::AppJsonLogger)
+ @destination = destination
+ @merge_request = merge_request
+ end
+
+ def commit
+ return unless enabled?
+
+ commit_logs
+ end
+
+ def instrument(mergeability_name:)
+ raise ArgumentError, 'block not given' unless block_given?
+
+ return yield unless enabled?
+
+ op_start_db_counters = current_db_counter_payload
+ op_started_at = current_monotonic_time
+
+ result = yield
+
+ observe("mergeability.#{mergeability_name}.duration_s", current_monotonic_time - op_started_at)
+
+ observe_sql_counters(mergeability_name, op_start_db_counters, current_db_counter_payload)
+
+ result
+ end
+
+ private
+
+ attr_reader :destination, :merge_request
+
+ def observe(name, value)
+ return unless enabled?
+
+ observations[name.to_s].push(value)
+ end
+
+ def commit_logs
+ attributes = Gitlab::ApplicationContext.current.merge({
+ mergeability_project_id: merge_request.project.id
+ })
+
+ attributes[:mergeability_merge_request_id] = merge_request.id
+ attributes.merge!(observations_hash)
+ attributes.compact!
+ attributes.stringify_keys!
+
+ destination.info(attributes)
+ end
+
+ def observations_hash
+ transformed = observations.transform_values do |values|
+ next if values.empty?
+
+ {
+ 'values' => values
+ }
+ end.compact
+
+ transformed.each_with_object({}) do |key, hash|
+ key[1].each { |k, v| hash["#{key[0]}.#{k}"] = v }
+ end
+ end
+
+ def observations
+ strong_memoize(:observations) do
+ Hash.new { |hash, key| hash[key] = [] }
+ end
+ end
+
+ def observe_sql_counters(name, start_db_counters, end_db_counters)
+ end_db_counters.each do |key, value|
+ result = value - start_db_counters.fetch(key, 0)
+ next if result == 0
+
+ observe("mergeability.#{name}.#{key}", result)
+ end
+ end
+
+ def current_db_counter_payload
+ ::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_payload
+ end
+
+ def enabled?
+ strong_memoize(:enabled) do
+ ::Feature.enabled?(:mergeability_checks_logger, merge_request.project)
+ end
+ end
+
+ def current_monotonic_time
+ ::Gitlab::Metrics::System.monotonic_time
+ end
+ end
+ end
+end
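The logger buffers one observation per check (duration plus SQL counter deltas) and writes a single structured log line on commit, and only when the mergeability_checks_logger flag is enabled for the project. A usage sketch mirroring the RunChecksService integration below; merge_request and the wrapped call are placeholders:

logger = MergeRequests::Mergeability::Logger.new(merge_request: merge_request)

result = logger.instrument(mergeability_name: 'check_ci_status') do
  run_some_mergeability_check # placeholder for the real check
end

# Nothing is written until commit; it emits keys such as
# "mergeability.check_ci_status.duration_s.values" to Gitlab::AppJsonLogger.
logger.commit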
diff --git a/app/services/merge_requests/mergeability/run_checks_service.rb b/app/services/merge_requests/mergeability/run_checks_service.rb
index 68f842b3322..7f205c8dd6c 100644
--- a/app/services/merge_requests/mergeability/run_checks_service.rb
+++ b/app/services/merge_requests/mergeability/run_checks_service.rb
@@ -15,12 +15,17 @@ module MergeRequests
next if check.skip?
- check_result = run_check(check)
+ check_result = logger.instrument(mergeability_name: check_class.to_s.demodulize.underscore) do
+ run_check(check)
+ end
+
result_hash << check_result
break result_hash if check_result.failed?
end
+ logger.commit
+
self
end
@@ -57,6 +62,12 @@ module MergeRequests
Gitlab::MergeRequests::Mergeability::ResultsStore.new(merge_request: merge_request)
end
end
+
+ def logger
+ strong_memoize(:logger) do
+ MergeRequests::Mergeability::Logger.new(merge_request: merge_request)
+ end
+ end
end
end
end
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index 5205d34baae..533d0052fb8 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -234,6 +234,7 @@ module MergeRequests
end
# Add comment about pushing new commits to merge requests and send nofitication emails
+ #
def notify_about_push(merge_request)
return unless @commits.present?
diff --git a/app/services/merge_requests/update_assignees_service.rb b/app/services/merge_requests/update_assignees_service.rb
index a6b0235c525..a13db52e34b 100644
--- a/app/services/merge_requests/update_assignees_service.rb
+++ b/app/services/merge_requests/update_assignees_service.rb
@@ -20,8 +20,6 @@ module MergeRequests
attrs = update_attrs.merge(assignee_ids: new_ids)
merge_request.update!(**attrs)
- bulk_update_assignees_state(merge_request, merge_request.assignees - old_assignees)
-
# Defer the more expensive operations (handle_assignee_changes) to the background
MergeRequests::HandleAssigneesChangeService
.new(project: project, current_user: current_user)
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index 0902b5195a1..6d518edc88f 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -179,18 +179,16 @@ module MergeRequests
old_title_draft = MergeRequest.draft?(old_title)
new_title_draft = MergeRequest.draft?(new_title)
+ # notify the draft status changed. Added/removed message is handled in the
+ # email template itself, see `change_in_merge_request_draft_status_email` template.
+ notify_draft_status_changed(merge_request) if old_title_draft || new_title_draft
+
if !old_title_draft && new_title_draft
# Marked as Draft
- #
- merge_request_activity_counter
- .track_marked_as_draft_action(user: current_user)
+ merge_request_activity_counter.track_marked_as_draft_action(user: current_user)
elsif old_title_draft && !new_title_draft
# Unmarked as Draft
- #
- notify_draft_status_changed(merge_request)
-
- merge_request_activity_counter
- .track_unmarked_as_draft_action(user: current_user)
+ merge_request_activity_counter.track_unmarked_as_draft_action(user: current_user)
end
end
diff --git a/app/services/milestones/transfer_service.rb b/app/services/milestones/transfer_service.rb
index b9bd259ca8b..bbf6920f83b 100644
--- a/app/services/milestones/transfer_service.rb
+++ b/app/services/milestones/transfer_service.rb
@@ -35,10 +35,7 @@ module Milestones
# rubocop: disable CodeReuse/ActiveRecord
def milestones_to_transfer
- Milestone.from_union([
- group_milestones_applied_to_issues,
- group_milestones_applied_to_merge_requests
- ])
+ Milestone.from_union([group_milestones_applied_to_issues, group_milestones_applied_to_merge_requests])
.reorder(nil)
.distinct
end
diff --git a/app/services/namespaces/in_product_marketing_emails_service.rb b/app/services/namespaces/in_product_marketing_emails_service.rb
index c139b2e11dd..1ce7e4cae16 100644
--- a/app/services/namespaces/in_product_marketing_emails_service.rb
+++ b/app/services/namespaces/in_product_marketing_emails_service.rb
@@ -89,7 +89,7 @@ module Namespaces
end
def groups_for_track
- onboarding_progress_scope = OnboardingProgress
+ onboarding_progress_scope = Onboarding::Progress
.completed_actions_with_latest_in_range(completed_actions, range)
.incomplete_actions(incomplete_actions)
diff --git a/app/services/notification_recipients/builder/base.rb b/app/services/notification_recipients/builder/base.rb
index 0a7f25f1af3..3fabec29c0d 100644
--- a/app/services/notification_recipients/builder/base.rb
+++ b/app/services/notification_recipients/builder/base.rb
@@ -183,58 +183,6 @@ module NotificationRecipients
add_recipients(target.subscribers(project), :subscription, NotificationReason::SUBSCRIBED)
end
- # rubocop: disable CodeReuse/ActiveRecord
- def user_ids_notifiable_on(resource, notification_level = nil)
- return [] unless resource
-
- scope = resource.notification_settings
-
- if notification_level
- scope = scope.where(level: NotificationSetting.levels[notification_level])
- end
-
- scope.pluck(:user_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # Build a list of user_ids based on project notification settings
- def select_project_members_ids(global_setting, user_ids_global_level_watch)
- user_ids = user_ids_notifiable_on(project, :watch)
-
- # If project setting is global, add to watch list if global setting is watch
- user_ids + (global_setting & user_ids_global_level_watch)
- end
-
- # Build a list of user_ids based on group notification settings
- def select_group_members_ids(group, project_members, global_setting, user_ids_global_level_watch)
- uids = user_ids_notifiable_on(group, :watch)
-
- # Group setting is global, add to user_ids list if global setting is watch
- uids + (global_setting & user_ids_global_level_watch) - project_members
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def user_ids_with_global_level_watch(ids)
- settings_with_global_level_of(:watch, ids).pluck(:user_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def user_ids_with_global_level_custom(ids, action)
- settings_with_global_level_of(:custom, ids).pluck(:user_id)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def settings_with_global_level_of(level, ids)
- NotificationSetting.where(
- user_id: ids,
- source_type: nil,
- level: NotificationSetting.levels[level]
- )
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
def add_labels_subscribers(labels: nil)
return unless target.respond_to? :labels
diff --git a/app/services/onboarding/progress_service.rb b/app/services/onboarding/progress_service.rb
new file mode 100644
index 00000000000..66f7f2bc33d
--- /dev/null
+++ b/app/services/onboarding/progress_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Onboarding
+ class ProgressService
+ class Async
+ attr_reader :namespace_id
+
+ def initialize(namespace_id)
+ @namespace_id = namespace_id
+ end
+
+ def execute(action:)
+ return unless Onboarding::Progress.not_completed?(namespace_id, action)
+
+ Namespaces::OnboardingProgressWorker.perform_async(namespace_id, action)
+ end
+ end
+
+ def self.async(namespace_id)
+ Async.new(namespace_id)
+ end
+
+ def initialize(namespace)
+ @namespace = namespace&.root_ancestor
+ end
+
+ def execute(action:)
+ return unless @namespace
+
+ Onboarding::Progress.register(@namespace, action)
+ end
+ end
+end
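The replacement keeps the old service's two entry points: the synchronous form registers the action against the namespace's root ancestor, and the async form enqueues Namespaces::OnboardingProgressWorker only when the action is still incomplete for that namespace id. Usage sketch (Rails console; project and action are illustrative):

# Synchronous: resolves namespace.root_ancestor and registers the action.
Onboarding::ProgressService.new(project.namespace).execute(action: :git_write)

# Asynchronous: skips the enqueue if the action was already completed.
Onboarding::ProgressService.async(project.namespace_id).execute(action: :git_write)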
diff --git a/app/services/onboarding_progress_service.rb b/app/services/onboarding_progress_service.rb
deleted file mode 100644
index 6d44c0a61ea..00000000000
--- a/app/services/onboarding_progress_service.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-class OnboardingProgressService
- class Async
- attr_reader :namespace_id
-
- def initialize(namespace_id)
- @namespace_id = namespace_id
- end
-
- def execute(action:)
- return unless OnboardingProgress.not_completed?(namespace_id, action)
-
- Namespaces::OnboardingProgressWorker.perform_async(namespace_id, action)
- end
- end
-
- def self.async(namespace_id)
- Async.new(namespace_id)
- end
-
- def initialize(namespace)
- @namespace = namespace&.root_ancestor
- end
-
- def execute(action:)
- return unless @namespace
-
- OnboardingProgress.register(@namespace, action)
- end
-end
diff --git a/app/services/packages/conan/search_service.rb b/app/services/packages/conan/search_service.rb
index 31ee9bea084..df22a895c00 100644
--- a/app/services/packages/conan/search_service.rb
+++ b/app/services/packages/conan/search_service.rb
@@ -44,7 +44,7 @@ module Packages
name, version, username, _ = query.split(%r{[@/]})
full_path = Packages::Conan::Metadatum.full_path_from(package_username: username)
project = Project.find_by_full_path(full_path)
- return unless Ability.allowed?(current_user, :read_package, project)
+ return unless Ability.allowed?(current_user, :read_package, project&.packages_policy_subject)
result = project.packages.with_name(name).with_version(version).order_created.last
[result&.conan_recipe].compact
diff --git a/app/services/packages/debian/generate_distribution_service.rb b/app/services/packages/debian/generate_distribution_service.rb
index 7db27f9234d..9b313202400 100644
--- a/app/services/packages/debian/generate_distribution_service.rb
+++ b/app/services/packages/debian/generate_distribution_service.rb
@@ -220,6 +220,7 @@ module Packages
valid_until_field,
rfc822_field('NotAutomatic', !@distribution.automatic, !@distribution.automatic),
rfc822_field('ButAutomaticUpgrades', @distribution.automatic_upgrades, !@distribution.automatic && @distribution.automatic_upgrades),
+ rfc822_field('Acquire-By-Hash', 'yes'),
rfc822_field('Architectures', @distribution.architectures.map { |architecture| architecture.name }.sort.join(' ')),
rfc822_field('Components', @distribution.components.map { |component| component.name }.sort.join(' ')),
rfc822_field('Description', @distribution.description)
diff --git a/app/services/packages/debian/process_changes_service.rb b/app/services/packages/debian/process_changes_service.rb
index b6e81012656..a29cbd3f65f 100644
--- a/app/services/packages/debian/process_changes_service.rb
+++ b/app/services/packages/debian/process_changes_service.rb
@@ -42,22 +42,30 @@ module Packages
def update_files_metadata
files.each do |filename, entry|
- entry.package_file.package = package
-
file_metadata = ::Packages::Debian::ExtractMetadataService.new(entry.package_file).execute
+ ::Packages::UpdatePackageFileService.new(entry.package_file, package_id: package.id)
+ .execute
+
+ # Force reload from database, as package has changed
+ entry.package_file.reload_package
+
entry.package_file.debian_file_metadatum.update!(
file_type: file_metadata[:file_type],
component: files[filename].component,
architecture: file_metadata[:architecture],
fields: file_metadata[:fields]
)
- entry.package_file.save!
end
end
def update_changes_metadata
- package_file.update!(package: package)
+ ::Packages::UpdatePackageFileService.new(package_file, package_id: package.id)
+ .execute
+
+ # Force reload from database, as package has changed
+ package_file.reload_package
+
package_file.debian_file_metadatum.update!(
file_type: metadata[:file_type],
fields: metadata[:fields]
diff --git a/app/services/packages/rpm/repository_metadata/base_builder.rb b/app/services/packages/rpm/repository_metadata/base_builder.rb
new file mode 100644
index 00000000000..9d76336d764
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/base_builder.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BaseBuilder
+ def execute
+ build_empty_structure
+ end
+
+ private
+
+ def build_empty_structure
+ Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
+ xml.public_send(self.class::ROOT_TAG, self.class::ROOT_ATTRIBUTES) # rubocop:disable GitlabSecurity/PublicSend
+ end.to_xml
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rpm/repository_metadata/build_filelist_xml.rb b/app/services/packages/rpm/repository_metadata/build_filelist_xml.rb
new file mode 100644
index 00000000000..01fb36f4b91
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/build_filelist_xml.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BuildFilelistXml < ::Packages::Rpm::RepositoryMetadata::BaseBuilder
+ ROOT_TAG = 'filelists'
+ ROOT_ATTRIBUTES = {
+ xmlns: 'http://linux.duke.edu/metadata/filelists',
+ packages: '0'
+ }.freeze
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rpm/repository_metadata/build_other_xml.rb b/app/services/packages/rpm/repository_metadata/build_other_xml.rb
new file mode 100644
index 00000000000..4bf61c901a3
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/build_other_xml.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BuildOtherXml < ::Packages::Rpm::RepositoryMetadata::BaseBuilder
+ ROOT_TAG = 'otherdata'
+ ROOT_ATTRIBUTES = {
+ xmlns: 'http://linux.duke.edu/metadata/other',
+ packages: '0'
+ }.freeze
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rpm/repository_metadata/build_primary_xml.rb b/app/services/packages/rpm/repository_metadata/build_primary_xml.rb
new file mode 100644
index 00000000000..affb41677c2
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/build_primary_xml.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BuildPrimaryXml < ::Packages::Rpm::RepositoryMetadata::BaseBuilder
+ ROOT_TAG = 'metadata'
+ ROOT_ATTRIBUTES = {
+ xmlns: 'http://linux.duke.edu/metadata/common',
+ 'xmlns:rpm': 'http://linux.duke.edu/metadata/rpm',
+ packages: '0'
+ }.freeze
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rpm/repository_metadata/build_repomd_xml.rb b/app/services/packages/rpm/repository_metadata/build_repomd_xml.rb
new file mode 100644
index 00000000000..c6cfd77815d
--- /dev/null
+++ b/app/services/packages/rpm/repository_metadata/build_repomd_xml.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+module Packages
+ module Rpm
+ module RepositoryMetadata
+ class BuildRepomdXml
+ attr_reader :data
+
+ ROOT_ATTRIBUTES = {
+ xmlns: 'http://linux.duke.edu/metadata/repo',
+ 'xmlns:rpm': 'http://linux.duke.edu/metadata/rpm'
+ }.freeze
+
+ # Expected `data` structure
+ #
+ # data = {
+ # filelists: {
+ # checksum: { type: "sha256", value: "123" },
+ # location: { href: "repodata/123-filelists.xml.gz" },
+ # ...
+ # },
+ # ...
+ # }
+ def initialize(data)
+ @data = data
+ end
+
+ def execute
+ build_repomd
+ end
+
+ private
+
+ def build_repomd
+ Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
+ xml.repomd(ROOT_ATTRIBUTES) do
+ xml.revision Time.now.to_i
+ build_data_info(xml)
+ end
+ end.to_xml
+ end
+
+ def build_data_info(xml)
+ data.each do |filename, info|
+ xml.data(type: filename) do
+ build_file_info(info, xml)
+ end
+ end
+ end
+
+ def build_file_info(info, xml)
+ info.each do |key, attributes|
+ value = attributes.delete(:value)
+ xml.public_send(key, value, attributes) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+ end
+ end
+end
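BuildRepomdXml maps each top-level key of the data hash to a <data type=...> node; within each entry the :value key becomes the element text and the remaining keys become attributes. A Rails-console sketch with minimal, made-up data:

data = {
  filelists: {
    checksum: { type: 'sha256', value: 'abc123' },
    location: { href: 'repodata/abc123-filelists.xml.gz' }
  }
}

xml = Packages::Rpm::RepositoryMetadata::BuildRepomdXml.new(data).execute

# Produces, roughly (revision is the current epoch time):
# <repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="...">
#   <revision>1663632000</revision>
#   <data type="filelists">
#     <checksum type="sha256">abc123</checksum>
#     <location href="repodata/abc123-filelists.xml.gz"/>
#   </data>
# </repomd>
puts xml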
diff --git a/app/services/packages/rubygems/dependency_resolver_service.rb b/app/services/packages/rubygems/dependency_resolver_service.rb
index c44b26e2b92..839a7683632 100644
--- a/app/services/packages/rubygems/dependency_resolver_service.rb
+++ b/app/services/packages/rubygems/dependency_resolver_service.rb
@@ -8,7 +8,10 @@ module Packages
DEFAULT_PLATFORM = 'ruby'
def execute
- return ServiceResponse.error(message: "forbidden", http_status: :forbidden) unless Ability.allowed?(current_user, :read_package, project)
+ unless Ability.allowed?(current_user, :read_package, project&.packages_policy_subject)
+ return ServiceResponse.error(message: "forbidden", http_status: :forbidden)
+ end
+
return ServiceResponse.error(message: "#{gem_name} not found", http_status: :not_found) if packages.empty?
payload = packages.map do |package|
diff --git a/app/services/post_receive_service.rb b/app/services/post_receive_service.rb
index 15c978e6763..c376b4036f8 100644
--- a/app/services/post_receive_service.rb
+++ b/app/services/post_receive_service.rb
@@ -101,7 +101,7 @@ class PostReceiveService
def record_onboarding_progress
return unless project
- OnboardingProgressService.new(project.namespace).execute(action: :git_write)
+ Onboarding::ProgressService.new(project.namespace).execute(action: :git_write)
end
end
diff --git a/app/services/projects/alerting/notify_service.rb b/app/services/projects/alerting/notify_service.rb
index c21a61bcb52..9403c7bcfed 100644
--- a/app/services/projects/alerting/notify_service.rb
+++ b/app/services/projects/alerting/notify_service.rb
@@ -2,14 +2,13 @@
module Projects
module Alerting
- class NotifyService
+ class NotifyService < ::BaseProjectService
extend ::Gitlab::Utils::Override
include ::AlertManagement::AlertProcessing
include ::AlertManagement::Responses
- def initialize(project, payload)
- @project = project
- @payload = payload
+ def initialize(project, params)
+ super(project: project, params: params.to_h)
end
def execute(token, integration = nil)
@@ -29,15 +28,11 @@ module Projects
private
- attr_reader :project, :payload, :integration
+ attr_reader :integration
+ alias_method :payload, :params
def valid_payload_size?
- Gitlab::Utils::DeepSize.new(payload.to_h).valid?
- end
-
- override :alert_source
- def alert_source
- super || integration&.name || 'Generic Alert Endpoint'
+ Gitlab::Utils::DeepSize.new(params).valid?
end
def active_integration?
diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb
index b324ea27360..57b913b04e6 100644
--- a/app/services/projects/blame_service.rb
+++ b/app/services/projects/blame_service.rb
@@ -10,6 +10,7 @@ module Projects
@blob = blob
@commit = commit
@page = extract_page(params)
+ @pagination_enabled = pagination_state(params)
end
attr_reader :page
@@ -19,7 +20,7 @@ module Projects
end
def pagination
- return unless pagination_enabled?
+ return unless pagination_enabled
Kaminari.paginate_array([], total_count: blob_lines_count, limit: per_page)
.tap { |pagination| pagination.max_paginates_per(per_page) }
@@ -28,10 +29,10 @@ module Projects
private
- attr_reader :blob, :commit
+ attr_reader :blob, :commit, :pagination_enabled
def blame_range
- return unless pagination_enabled?
+ return unless pagination_enabled
first_line = (page - 1) * per_page + 1
last_line = (first_line + per_page).to_i - 1
@@ -51,6 +52,12 @@ module Projects
PER_PAGE
end
+ def pagination_state(params)
+ return false if Gitlab::Utils.to_boolean(params[:no_pagination], default: false)
+
+ Feature.enabled?(:blame_page_pagination, commit.project)
+ end
+
def overlimit?(page)
page * per_page >= blob_lines_count + per_page
end
@@ -58,9 +65,5 @@ module Projects
def blob_lines_count
@blob_lines_count ||= blob.data.lines.count
end
-
- def pagination_enabled?
- Feature.enabled?(:blame_page_pagination, commit.project)
- end
end
end
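pagination_state lets a request opt out of blame pagination with a no_pagination parameter even while the blame_page_pagination flag is on; Gitlab::Utils.to_boolean coerces the parameter and falls back to the given default. A standalone approximation of that decision (the to_boolean stand-in below is illustrative, not the real helper):

TRUTHY = %w[true t yes y on 1].freeze
FALSY  = %w[false f no n off 0].freeze

def to_boolean(value, default:)
  normalized = value.to_s.strip.downcase
  return true  if TRUTHY.include?(normalized)
  return false if FALSY.include?(normalized)

  default
end

def pagination_enabled?(params, feature_flag_enabled)
  return false if to_boolean(params[:no_pagination], default: false)

  feature_flag_enabled
end

pagination_enabled?({ no_pagination: 'true' }, true) # => false, caller opted out
pagination_enabled?({}, true)                        # => true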
diff --git a/app/services/projects/container_repository/base_container_repository_service.rb b/app/services/projects/container_repository/base_container_repository_service.rb
new file mode 100644
index 00000000000..d7539737e78
--- /dev/null
+++ b/app/services/projects/container_repository/base_container_repository_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Projects
+ module ContainerRepository
+ class BaseContainerRepositoryService < ::BaseContainerService
+ include ::Gitlab::Utils::StrongMemoize
+
+ alias_method :container_repository, :container
+
+ def initialize(container_repository:, current_user: nil, params: {})
+ super(container: container_repository, current_user: current_user, params: params)
+ end
+
+ delegate :project, to: :container_repository
+ end
+ end
+end
diff --git a/app/services/projects/container_repository/cleanup_tags_base_service.rb b/app/services/projects/container_repository/cleanup_tags_base_service.rb
new file mode 100644
index 00000000000..8ea4ae4830a
--- /dev/null
+++ b/app/services/projects/container_repository/cleanup_tags_base_service.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module Projects
+ module ContainerRepository
+ class CleanupTagsBaseService < BaseContainerRepositoryService
+ private
+
+ def filter_out_latest!(tags)
+ tags.reject!(&:latest?)
+ end
+
+ def filter_by_name!(tags)
+ regex_delete = ::Gitlab::UntrustedRegexp.new("\\A#{name_regex_delete || name_regex}\\z")
+ regex_retain = ::Gitlab::UntrustedRegexp.new("\\A#{name_regex_keep}\\z")
+
+ tags.select! do |tag|
+ # regex_retain will override any overlapping matches by regex_delete
+ regex_delete.match?(tag.name) && !regex_retain.match?(tag.name)
+ end
+ end
+
+ # Should return [tags_to_delete, tags_to_keep]
+ def partition_by_keep_n(tags)
+ return [tags, []] unless keep_n
+
+ tags = order_by_date_desc(tags)
+
+ tags.partition.with_index { |_, index| index >= keep_n_as_integer }
+ end
+
+ # Should return [tags_to_delete, tags_to_keep]
+ def partition_by_older_than(tags)
+ return [tags, []] unless older_than
+
+ older_than_timestamp = older_than_in_seconds.ago
+
+ tags.partition do |tag|
+ timestamp = pushed_at(tag)
+
+ timestamp && timestamp < older_than_timestamp
+ end
+ end
+
+ def order_by_date_desc(tags)
+ now = DateTime.current
+ tags.sort_by! { |tag| pushed_at(tag) || now }
+ .reverse!
+ end
+
+ def delete_tags(tags)
+ return success(deleted: []) unless tags.any?
+
+ service = Projects::ContainerRepository::DeleteTagsService.new(
+ project,
+ current_user,
+ tags: tags.map(&:name),
+ container_expiration_policy: container_expiration_policy
+ )
+
+ service.execute(container_repository)
+ end
+
+ def can_destroy?
+ return true if container_expiration_policy
+
+ can?(current_user, :destroy_container_image, project)
+ end
+
+ def valid_regex?
+ %w[name_regex_delete name_regex name_regex_keep].each do |param_name|
+ regex = params[param_name]
+ ::Gitlab::UntrustedRegexp.new(regex) unless regex.blank?
+ end
+ true
+ rescue RegexpError => e
+ ::Gitlab::ErrorTracking.log_exception(e, project_id: project.id)
+ false
+ end
+
+ def older_than
+ params['older_than']
+ end
+
+ def name_regex_delete
+ params['name_regex_delete']
+ end
+
+ def name_regex
+ params['name_regex']
+ end
+
+ def name_regex_keep
+ params['name_regex_keep']
+ end
+
+ def container_expiration_policy
+ params['container_expiration_policy']
+ end
+
+ def keep_n
+ params['keep_n']
+ end
+
+ def project
+ container_repository.project
+ end
+
+ def keep_n_as_integer
+ keep_n.to_i
+ end
+
+ def older_than_in_seconds
+ strong_memoize(:older_than_in_seconds) do
+ ChronicDuration.parse(older_than).seconds
+ end
+ end
+ end
+ end
+end
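Both partition helpers return a [tags_to_delete, tags_to_keep] pair: keep_n protects the most recently pushed N tags, older_than only marks tags pushed before the cutoff, and pushed_at is supplied by the concrete subclasses. A standalone toy showing the partition semantics, with Structs in place of registry tags:

Tag = Struct.new(:name, :pushed_at)

now = Time.now
tags = [
  Tag.new('v1', now - 90_000), # ~25 hours old
  Tag.new('v2', now - 3_600),  # 1 hour old
  Tag.new('v3', now - 60)      # 1 minute old
]

# partition_by_keep_n with keep_n = 1: newest first, everything past index 0 is deletable.
ordered = tags.sort_by(&:pushed_at).reverse
to_delete, to_keep = ordered.partition.with_index { |_, index| index >= 1 }
to_delete.map(&:name) # => ["v2", "v1"]
to_keep.map(&:name)   # => ["v3"]

# partition_by_older_than with a one-day cutoff: only sufficiently old tags are deletable.
cutoff = now - 86_400
to_delete, to_keep = tags.partition { |tag| tag.pushed_at && tag.pushed_at < cutoff }
to_delete.map(&:name) # => ["v1"]
to_keep.map(&:name)   # => ["v2", "v3"]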
diff --git a/app/services/projects/container_repository/cleanup_tags_service.rb b/app/services/projects/container_repository/cleanup_tags_service.rb
index 0a8e8e72766..285c3e252ef 100644
--- a/app/services/projects/container_repository/cleanup_tags_service.rb
+++ b/app/services/projects/container_repository/cleanup_tags_service.rb
@@ -2,39 +2,33 @@
module Projects
module ContainerRepository
- class CleanupTagsService
- include BaseServiceUtility
- include ::Gitlab::Utils::StrongMemoize
+ class CleanupTagsService < CleanupTagsBaseService
+ def initialize(container_repository:, current_user: nil, params: {})
+ super
- def initialize(container_repository, user = nil, params = {})
- @container_repository = container_repository
- @current_user = user
@params = params.dup
-
- @project = container_repository.project
- @tags = container_repository.tags
- tags_size = @tags.size
- @counts = {
- original_size: tags_size,
- cached_tags_count: 0
- }
+ @counts = { cached_tags_count: 0 }
end
def execute
return error('access denied') unless can_destroy?
return error('invalid regex') unless valid_regex?
- filter_out_latest
- filter_by_name
+ tags = container_repository.tags
+ @counts[:original_size] = tags.size
+
+ filter_out_latest!(tags)
+ filter_by_name!(tags)
+
+ tags = truncate(tags)
+ populate_from_cache(tags)
- truncate
- populate_from_cache
+ tags = filter_keep_n(tags)
+ tags = filter_by_older_than(tags)
- filter_keep_n
- filter_by_older_than
+ @counts[:before_delete_size] = tags.size
- delete_tags.merge(@counts).tap do |result|
- result[:before_delete_size] = @tags.size
+ delete_tags(tags).merge(@counts).tap do |result|
result[:deleted_size] = result[:deleted]&.size
result[:status] = :error if @counts[:before_truncate_size] != @counts[:after_truncate_size]
@@ -43,94 +37,45 @@ module Projects
private
- def delete_tags
- return success(deleted: []) unless @tags.any?
-
- service = Projects::ContainerRepository::DeleteTagsService.new(
- @project,
- @current_user,
- tags: @tags.map(&:name),
- container_expiration_policy: container_expiration_policy
- )
-
- service.execute(@container_repository)
- end
-
- def filter_out_latest
- @tags.reject!(&:latest?)
- end
-
- def order_by_date
- now = DateTime.current
- @tags.sort_by! { |tag| tag.created_at || now }
- .reverse!
- end
+ def filter_keep_n(tags)
+ tags, tags_to_keep = partition_by_keep_n(tags)
- def filter_by_name
- regex_delete = ::Gitlab::UntrustedRegexp.new("\\A#{name_regex_delete || name_regex}\\z")
- regex_retain = ::Gitlab::UntrustedRegexp.new("\\A#{name_regex_keep}\\z")
-
- @tags.select! do |tag|
- # regex_retain will override any overlapping matches by regex_delete
- regex_delete.match?(tag.name) && !regex_retain.match?(tag.name)
- end
- end
-
- def filter_keep_n
- return unless keep_n
+ cache_tags(tags_to_keep)
- order_by_date
- cache_tags(@tags.first(keep_n_as_integer))
- @tags = @tags.drop(keep_n_as_integer)
+ tags
end
- def filter_by_older_than
- return unless older_than
-
- older_than_timestamp = older_than_in_seconds.ago
-
- @tags, tags_to_keep = @tags.partition do |tag|
- tag.created_at && tag.created_at < older_than_timestamp
- end
+ def filter_by_older_than(tags)
+ tags, tags_to_keep = partition_by_older_than(tags)
cache_tags(tags_to_keep)
- end
- def can_destroy?
- return true if container_expiration_policy
-
- can?(@current_user, :destroy_container_image, @project)
+ tags
end
- def valid_regex?
- %w(name_regex_delete name_regex name_regex_keep).each do |param_name|
- regex = @params[param_name]
- ::Gitlab::UntrustedRegexp.new(regex) unless regex.blank?
- end
- true
- rescue RegexpError => e
- ::Gitlab::ErrorTracking.log_exception(e, project_id: @project.id)
- false
+ def pushed_at(tag)
+ tag.created_at
end
- def truncate
- @counts[:before_truncate_size] = @tags.size
- @counts[:after_truncate_size] = @tags.size
+ def truncate(tags)
+ @counts[:before_truncate_size] = tags.size
+ @counts[:after_truncate_size] = tags.size
- return if max_list_size == 0
+ return tags if max_list_size == 0
# truncate the list to make sure that after the #filter_keep_n
# execution, the resulting list will be max_list_size
truncated_size = max_list_size + keep_n_as_integer
- return if @tags.size <= truncated_size
+ return tags if tags.size <= truncated_size
- @tags = @tags.sample(truncated_size)
- @counts[:after_truncate_size] = @tags.size
+ tags = tags.sample(truncated_size)
+ @counts[:after_truncate_size] = tags.size
+ tags
end
- def populate_from_cache
- @counts[:cached_tags_count] = cache.populate(@tags) if caching_enabled?
+ def populate_from_cache(tags)
+ @counts[:cached_tags_count] = cache.populate(tags) if caching_enabled?
end
def cache_tags(tags)
@@ -139,7 +84,7 @@ module Projects
def cache
strong_memoize(:cache) do
- ::Gitlab::ContainerRepository::Tags::Cache.new(@container_repository)
+ ::Gitlab::ContainerRepository::Tags::Cache.new(container_repository)
end
end
@@ -153,40 +98,6 @@ module Projects
def max_list_size
::Gitlab::CurrentSettings.current_application_settings.container_registry_cleanup_tags_service_max_list_size.to_i
end
-
- def keep_n
- @params['keep_n']
- end
-
- def keep_n_as_integer
- keep_n.to_i
- end
-
- def older_than_in_seconds
- strong_memoize(:older_than_in_seconds) do
- ChronicDuration.parse(older_than).seconds
- end
- end
-
- def older_than
- @params['older_than']
- end
-
- def name_regex_delete
- @params['name_regex_delete']
- end
-
- def name_regex
- @params['name_regex']
- end
-
- def name_regex_keep
- @params['name_regex_keep']
- end
-
- def container_expiration_policy
- @params['container_expiration_policy']
- end
end
end
end
diff --git a/app/services/projects/container_repository/gitlab/cleanup_tags_service.rb b/app/services/projects/container_repository/gitlab/cleanup_tags_service.rb
new file mode 100644
index 00000000000..81bb94c867a
--- /dev/null
+++ b/app/services/projects/container_repository/gitlab/cleanup_tags_service.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module Projects
+ module ContainerRepository
+ module Gitlab
+ class CleanupTagsService < CleanupTagsBaseService
+ include ::Projects::ContainerRepository::Gitlab::Timeoutable
+
+ TAGS_PAGE_SIZE = 1000
+
+ def initialize(container_repository:, current_user: nil, params: {})
+ super
+ @params = params.dup
+ end
+
+ def execute
+ return error('access denied') unless can_destroy?
+ return error('invalid regex') unless valid_regex?
+
+ with_timeout do |start_time, result|
+ container_repository.each_tags_page(page_size: TAGS_PAGE_SIZE) do |tags|
+ execute_for_tags(tags, result)
+
+ raise TimeoutError if timeout?(start_time)
+ end
+ end
+ end
+
+ private
+
+ def execute_for_tags(tags, overall_result)
+ original_size = tags.size
+
+ filter_out_latest!(tags)
+ filter_by_name!(tags)
+
+ tags = filter_by_keep_n(tags)
+ tags = filter_by_older_than(tags)
+
+ overall_result[:before_delete_size] += tags.size
+ overall_result[:original_size] += original_size
+
+ result = delete_tags(tags)
+
+ overall_result[:deleted_size] += result[:deleted]&.size
+ overall_result[:deleted] += result[:deleted]
+ overall_result[:status] = result[:status] unless overall_result[:status] == :error
+ end
+
+ def with_timeout
+ result = {
+ original_size: 0,
+ before_delete_size: 0,
+ deleted_size: 0,
+ deleted: []
+ }
+
+ yield Time.zone.now, result
+
+ result
+ rescue TimeoutError
+ result[:status] = :error
+
+ result
+ end
+
+ def filter_by_keep_n(tags)
+ partition_by_keep_n(tags).first
+ end
+
+ def filter_by_older_than(tags)
+ partition_by_older_than(tags).first
+ end
+
+ def pushed_at(tag)
+ tag.updated_at || tag.created_at
+ end
+ end
+ end
+ end
+end
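The GitLab-API variant walks tags page by page, filters each page with the shared helpers, accumulates counters into a single result hash, and turns a timeout into status: :error while keeping whatever was already deleted. A standalone sketch of that accumulate-until-timeout shape; paging and deletion are stubbed with plain arrays:

BudgetExceeded = Class.new(StandardError) # stands in for the concern's TimeoutError

def cleanup(pages, budget_seconds:)
  result = { original_size: 0, deleted: [], status: :success }
  started_at = Time.now

  pages.each do |page|
    result[:original_size] += page.size
    result[:deleted] += page.select { |tag| tag.start_with?('tmp-') } # stand-in for the filters

    raise BudgetExceeded if Time.now - started_at > budget_seconds
  end

  result
rescue BudgetExceeded
  result[:status] = :error
  result
end

pages = [%w[tmp-1 v1], %w[tmp-2 v2]]
cleanup(pages, budget_seconds: 10)
# => { original_size: 4, deleted: ["tmp-1", "tmp-2"], status: :success }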
diff --git a/app/services/projects/container_repository/gitlab/delete_tags_service.rb b/app/services/projects/container_repository/gitlab/delete_tags_service.rb
index 81cef554dec..530cf87c338 100644
--- a/app/services/projects/container_repository/gitlab/delete_tags_service.rb
+++ b/app/services/projects/container_repository/gitlab/delete_tags_service.rb
@@ -6,10 +6,7 @@ module Projects
class DeleteTagsService
include BaseServiceUtility
include ::Gitlab::Utils::StrongMemoize
-
- DISABLED_TIMEOUTS = [nil, 0].freeze
-
- TimeoutError = Class.new(StandardError)
+ include ::Projects::ContainerRepository::Gitlab::Timeoutable
def initialize(container_repository, tag_names)
@container_repository = container_repository
@@ -44,16 +41,6 @@ module Projects
@deleted_tags.any? ? success(deleted: @deleted_tags) : error('could not delete tags')
end
-
- def timeout?(start_time)
- return false if service_timeout.in?(DISABLED_TIMEOUTS)
-
- (Time.zone.now - start_time) > service_timeout
- end
-
- def service_timeout
- ::Gitlab::CurrentSettings.current_application_settings.container_registry_delete_tags_service_timeout
- end
end
end
end
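The constants and timeout helpers removed here now live in the ::Projects::ContainerRepository::Gitlab::Timeoutable concern included above, whose definition is not part of this excerpt. A plausible reconstruction of it, based purely on the removed lines rather than the actual file:

module Projects
  module ContainerRepository
    module Gitlab
      module Timeoutable
        extend ActiveSupport::Concern

        DISABLED_TIMEOUTS = [nil, 0].freeze

        TimeoutError = Class.new(StandardError)

        private

        def timeout?(start_time)
          return false if service_timeout.in?(DISABLED_TIMEOUTS)

          (Time.zone.now - start_time) > service_timeout
        end

        def service_timeout
          ::Gitlab::CurrentSettings.current_application_settings.container_registry_delete_tags_service_timeout
        end
      end
    end
  end
end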
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 6381ee67ce7..c72f9b4b602 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -96,7 +96,7 @@ module Projects
log_info("#{current_user.name} created a new project \"#{@project.full_name}\"")
if @project.import?
- experiment(:combined_registration, user: current_user).track(:import_project)
+ Gitlab::Tracking.event(self.class.name, 'import_project', user: current_user)
else
# Skip writing the config for project imports/forks because it
# will always fail since the Git directory doesn't exist until
@@ -158,14 +158,25 @@ module Projects
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
else
- @project.add_owner(@project.namespace.owner, current_user: current_user)
+ owner_user = @project.namespace.owner
+ owner_member = @project.add_owner(owner_user, current_user: current_user)
+
+ # There is a possibility that the sidekiq job to refresh the authorizations of the owner_user in this project
+ # isn't picked up (or finished) by the time the user is redirected to the newly created project's page.
+ # If that happens, the user will hit a 404. To avoid that scenario, we manually create a `project_authorizations` record for the user here.
+ if owner_member.persisted?
+ owner_user.project_authorizations.safe_find_or_create_by(
+ project: @project,
+ access_level: ProjectMember::OWNER
+ )
+ end
# During the process of adding a project owner, a check on permissions is made on the user which caches
# the max member access for that user on this project.
# Since that is `0` before the member is created - and we are still inside the request
# cycle when we need to do other operations that might check those permissions (e.g. write a commit)
# we need to purge that cache so that the updated permissions is fetched instead of using the outdated cached value of 0
# from before member creation
- @project.team.purge_member_access_cache_for_user_id(@project.namespace.owner.id)
+ @project.team.purge_member_access_cache_for_user_id(owner_user.id)
end
end
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index 06a44b07f9f..f1525ed9763 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -67,9 +67,9 @@ module Projects
end
def remove_snippets
- # We're setting the hard_delete param because we dont need to perform the access checks within the service since
+ # We're setting the skip_authorization param because we dont need to perform the access checks within the service since
# the user has enough access rights to remove the project and its resources.
- response = ::Snippets::BulkDestroyService.new(current_user, project.snippets).execute(hard_delete: true)
+ response = ::Snippets::BulkDestroyService.new(current_user, project.snippets).execute(skip_authorization: true)
if response.error?
log_error("Snippet deletion failed on #{project.full_path} with the following message: #{response.message}")
@@ -134,6 +134,8 @@ module Projects
destroy_ci_records!
destroy_mr_diff_relations!
+ destroy_merge_request_diffs! if ::Feature.enabled?(:extract_mr_diff_deletions)
+
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
@@ -158,10 +160,9 @@ module Projects
#
# rubocop: disable CodeReuse/ActiveRecord
def destroy_mr_diff_relations!
- mr_batch_size = 100
delete_batch_size = 1000
- project.merge_requests.each_batch(column: :iid, of: mr_batch_size) do |relation_ids|
+ project.merge_requests.each_batch(column: :iid, of: BATCH_SIZE) do |relation_ids|
[MergeRequestDiffCommit, MergeRequestDiffFile].each do |model|
loop do
inner_query = model
@@ -180,6 +181,23 @@ module Projects
end
# rubocop: enable CodeReuse/ActiveRecord
+ # rubocop: disable CodeReuse/ActiveRecord
+ def destroy_merge_request_diffs!
+ delete_batch_size = 1000
+
+ project.merge_requests.each_batch(column: :iid, of: BATCH_SIZE) do |relation|
+ loop do
+ deleted_rows = MergeRequestDiff
+ .where(merge_request: relation)
+ .limit(delete_batch_size)
+ .delete_all
+
+ break if deleted_rows == 0
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
def destroy_ci_records!
# Make sure to destroy this first just in case the project is undergoing stats refresh.
# This is to avoid logging the artifact deletion in Ci::JobArtifacts::DestroyBatchService.
diff --git a/app/services/projects/prometheus/alerts/notify_service.rb b/app/services/projects/prometheus/alerts/notify_service.rb
index 6265a74fad2..9f260345937 100644
--- a/app/services/projects/prometheus/alerts/notify_service.rb
+++ b/app/services/projects/prometheus/alerts/notify_service.rb
@@ -3,9 +3,8 @@
module Projects
module Prometheus
module Alerts
- class NotifyService
+ class NotifyService < ::BaseProjectService
include Gitlab::Utils::StrongMemoize
- include ::IncidentManagement::Settings
include ::AlertManagement::Responses
# This set of keys identifies a payload as a valid Prometheus
@@ -26,14 +25,13 @@ module Projects
# https://gitlab.com/gitlab-com/gl-infra/production/-/issues/6086
PROCESS_MAX_ALERTS = 100
- def initialize(project, payload)
- @project = project
- @payload = payload
+ def initialize(project, params)
+ super(project: project, params: params.to_h)
end
def execute(token, integration = nil)
return bad_request unless valid_payload_size?
- return unprocessable_entity unless self.class.processable?(payload)
+ return unprocessable_entity unless self.class.processable?(params)
return unauthorized unless valid_alert_manager_token?(token, integration)
truncate_alerts! if max_alerts_exceeded?
@@ -53,10 +51,8 @@ module Projects
private
- attr_reader :project, :payload
-
def valid_payload_size?
- Gitlab::Utils::DeepSize.new(payload.to_h).valid?
+ Gitlab::Utils::DeepSize.new(params).valid?
end
def max_alerts_exceeded?
@@ -75,11 +71,11 @@ module Projects
}
)
- payload['alerts'] = alerts.first(PROCESS_MAX_ALERTS)
+ params['alerts'] = alerts.first(PROCESS_MAX_ALERTS)
end
def alerts
- payload['alerts']
+ params['alerts']
end
def valid_alert_manager_token?(token, integration)
@@ -152,7 +148,7 @@ module Projects
def process_prometheus_alerts
alerts.map do |alert|
AlertManagement::ProcessPrometheusAlertService
- .new(project, alert.to_h)
+ .new(project, alert)
.execute
end
end
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index dd1c2b94e18..bf90783fcbe 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -65,11 +65,20 @@ module Projects
def build_commit_status
GenericCommitStatus.new(
user: build.user,
- stage: 'deploy',
+ ci_stage: stage,
name: 'pages:deploy'
)
end
+ # rubocop: disable Performance/ActiveRecordSubtransactionMethods
+ def stage
+ build.pipeline.stages.safe_find_or_create_by(name: 'deploy', pipeline_id: build.pipeline.id) do |stage|
+ stage.position = GenericCommitStatus::EXTERNAL_STAGE_IDX
+ stage.project = build.project
+ end
+ end
+ # rubocop: enable Performance/ActiveRecordSubtransactionMethods
+
def create_pages_deployment(artifacts_path, build)
sha256 = build.job_artifacts_archive.file_sha256
File.open(artifacts_path) do |file|
diff --git a/app/services/releases/create_service.rb b/app/services/releases/create_service.rb
index 2588d2187a5..b7df201824a 100644
--- a/app/services/releases/create_service.rb
+++ b/app/services/releases/create_service.rb
@@ -4,6 +4,7 @@ module Releases
class CreateService < Releases::BaseService
def execute
return error('Access Denied', 403) unless allowed?
+ return error('You are not allowed to create this tag as it is protected.', 403) unless can_create_tag?
return error('Release already exists', 409) if release
return error("Milestone(s) not found: #{inexistent_milestones.join(', ')}", 400) if inexistent_milestones.any?
@@ -38,7 +39,7 @@ module Releases
end
def allowed?
- Ability.allowed?(current_user, :create_release, project) && can_create_tag?
+ Ability.allowed?(current_user, :create_release, project)
end
def can_create_tag?
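Editor's note: the reordered guards above let the service report a protected-tag failure with its own message instead of folding it into the generic "Access Denied" check. A small, self-contained sketch of the same guard-clause ordering; the predicates and hash responses here are stand-ins, not the real Ability or ServiceResponse objects.

# Each guard returns early with its own message, so a broad check never
# masks the more specific one that follows it.
def create_release(allowed:, can_create_tag:, release_exists:)
  return { error: 'Access Denied', status: 403 } unless allowed
  return { error: 'You are not allowed to create this tag as it is protected.', status: 403 } unless can_create_tag
  return { error: 'Release already exists', status: 409 } if release_exists

  { success: true }
end

p create_release(allowed: true, can_create_tag: false, release_exists: false)
# => {:error=>"You are not allowed to create this tag as it is protected.", :status=>403}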
diff --git a/app/services/resource_events/change_labels_service.rb b/app/services/resource_events/change_labels_service.rb
index 04f917ec8ef..7e176f95db0 100644
--- a/app/services/resource_events/change_labels_service.rb
+++ b/app/services/resource_events/change_labels_service.rb
@@ -29,7 +29,10 @@ module ResourceEvents
resource.expire_note_etag_cache
- Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_label_changed_action(author: user) if resource.is_a?(Issue)
+ return unless resource.is_a?(Issue)
+
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_label_changed_action(author: user,
+ project: resource.project)
end
private
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index 89cb14e6fff..7fd0fb10b4b 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -18,41 +18,20 @@ module ServicePing
def execute
return unless ServicePing::ServicePingSettings.product_intelligence_enabled?
- start = Time.current
- begin
- usage_data = payload || ServicePing::BuildPayload.new.execute
- response = submit_usage_data_payload(usage_data)
- rescue StandardError => e
- return unless Gitlab::CurrentSettings.usage_ping_enabled?
-
- error_payload = {
- time: Time.current,
- uuid: Gitlab::CurrentSettings.uuid,
- hostname: Gitlab.config.gitlab.host,
- version: Gitlab.version_info.to_s,
- message: "#{e.message.presence || e.class} at #{e.backtrace[0]}",
- elapsed: (Time.current - start).round(1)
- }
- submit_payload({ error: error_payload }, path: ERROR_PATH)
+ start_time = Time.current
- usage_data = payload || Gitlab::Usage::ServicePingReport.for(output: :all_metrics_values)
- response = submit_usage_data_payload(usage_data)
- end
+ begin
+ response = submit_usage_data_payload
- version_usage_data_id =
- response.dig('conv_index', 'usage_data_id') || response.dig('dev_ops_score', 'usage_data_id')
+ raise SubmissionError, "Unsuccessful response code: #{response.code}" unless response.success?
- unless version_usage_data_id.is_a?(Integer) && version_usage_data_id > 0
- raise SubmissionError, "Invalid usage_data_id in response: #{version_usage_data_id}"
- end
+ handle_response(response)
+ submit_metadata_payload
+ rescue StandardError => e
+ submit_error_payload(e, start_time)
- unless skip_db_write
- raw_usage_data = save_raw_usage_data(usage_data)
- raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
- ServicePing::DevopsReport.new(response).execute
+ raise
end
-
- submit_payload(metadata(usage_data), path: METADATA_PATH)
end
private
@@ -90,14 +69,43 @@ module ServicePing
)
end
- def submit_usage_data_payload(usage_data)
- raise SubmissionError, 'Usage data is blank' if usage_data.blank?
+ def submit_usage_data_payload
+ raise SubmissionError, 'Usage data payload is blank' if payload.blank?
+
+ submit_payload(payload)
+ end
+
+ def handle_response(response)
+ version_usage_data_id =
+ response.dig('conv_index', 'usage_data_id') || response.dig('dev_ops_score', 'usage_data_id')
- response = submit_payload(usage_data)
+ unless version_usage_data_id.is_a?(Integer) && version_usage_data_id > 0
+ raise SubmissionError, "Invalid usage_data_id in response: #{version_usage_data_id}"
+ end
- raise SubmissionError, "Unsuccessful response code: #{response.code}" unless response.success?
+ return if skip_db_write
+
+ raw_usage_data = save_raw_usage_data(payload)
+ raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
+ ServicePing::DevopsReport.new(response).execute
+ end
+
+ def submit_error_payload(error, start_time)
+ current_time = Time.current
+ error_payload = {
+ time: current_time,
+ uuid: Gitlab::CurrentSettings.uuid,
+ hostname: Gitlab.config.gitlab.host,
+ version: Gitlab.version_info.to_s,
+ message: "#{error.message.presence || error.class} at #{error.backtrace[0]}",
+ elapsed: (current_time - start_time).round(1)
+ }
+
+ submit_payload({ error: error_payload }, path: ERROR_PATH)
+ end
- response
+ def submit_metadata_payload
+ submit_payload(metadata(payload), path: METADATA_PATH)
end
def save_raw_usage_data(usage_data)
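Editor's note: the restructured execute method above reports an error payload on failure and then re-raises, so the caller still sees the exception while the failure (including elapsed time) is recorded. A minimal runnable sketch of that report-then-re-raise shape; a warning stands in for submit_error_payload.

# On failure, build an error report with the elapsed time, submit it, and
# re-raise so the original exception still propagates to the caller.
def submit_with_error_reporting
  start_time = Time.now
  begin
    yield
  rescue StandardError => e
    elapsed = (Time.now - start_time).round(1)
    warn "service ping failed after #{elapsed}s: #{e.message}" # stand-in for submit_error_payload
    raise
  end
end

begin
  submit_with_error_reporting { raise 'Unsuccessful response code: 500' }
rescue StandardError
  puts 'caller still sees the failure'
end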
diff --git a/app/services/service_response.rb b/app/services/service_response.rb
index c7ab75a4426..848f90e7f25 100644
--- a/app/services/service_response.rb
+++ b/app/services/service_response.rb
@@ -2,20 +2,28 @@
class ServiceResponse
def self.success(message: nil, payload: {}, http_status: :ok)
- new(status: :success, message: message, payload: payload, http_status: http_status)
+ new(status: :success,
+ message: message,
+ payload: payload,
+ http_status: http_status)
end
- def self.error(message:, payload: {}, http_status: nil)
- new(status: :error, message: message, payload: payload, http_status: http_status)
+ def self.error(message:, payload: {}, http_status: nil, reason: nil)
+ new(status: :error,
+ message: message,
+ payload: payload,
+ http_status: http_status,
+ reason: reason)
end
- attr_reader :status, :message, :http_status, :payload
+ attr_reader :status, :message, :http_status, :payload, :reason
- def initialize(status:, message: nil, payload: {}, http_status: nil)
+ def initialize(status:, message: nil, payload: {}, http_status: nil, reason: nil)
self.status = status
self.message = message
self.payload = payload
self.http_status = http_status
+ self.reason = reason
end
def track_exception(as: StandardError, **extra_data)
@@ -41,7 +49,11 @@ class ServiceResponse
end
def to_h
- (payload || {}).merge(status: status, message: message, http_status: http_status)
+ (payload || {}).merge(
+ status: status,
+ message: message,
+ http_status: http_status,
+ reason: reason)
end
def success?
@@ -60,5 +72,5 @@ class ServiceResponse
private
- attr_writer :status, :message, :http_status, :payload
+ attr_writer :status, :message, :http_status, :payload, :reason
end
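Editor's note: ServiceResponse now carries an optional reason alongside message and http_status, and to_h exposes it. A stripped-down, standalone re-sketch of the value object, reduced to only the pieces touched by this diff, showing how a caller might branch on reason rather than parse the message string.

# Minimal stand-in for ServiceResponse, limited to the fields in this diff.
class MiniResponse
  attr_reader :status, :message, :payload, :http_status, :reason

  def self.error(message:, payload: {}, http_status: nil, reason: nil)
    new(status: :error, message: message, payload: payload, http_status: http_status, reason: reason)
  end

  def initialize(status:, message: nil, payload: {}, http_status: nil, reason: nil)
    @status = status
    @message = message
    @payload = payload
    @http_status = http_status
    @reason = reason
  end

  def to_h
    (payload || {}).merge(status: status, message: message, http_status: http_status, reason: reason)
  end
end

response = MiniResponse.error(message: 'tag is protected', reason: :protected_tag)
puts 'retry is pointless' if response.reason == :protected_tag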
diff --git a/app/services/snippets/base_service.rb b/app/services/snippets/base_service.rb
index 1a04c4fcedd..42e62d65ee4 100644
--- a/app/services/snippets/base_service.rb
+++ b/app/services/snippets/base_service.rb
@@ -73,6 +73,15 @@ module Snippets
message
end
+ def file_paths_to_commit
+ paths = []
+ snippet_actions.to_commit_actions.each do |action|
+ paths << { path: action[:file_path] }
+ end
+
+ paths
+ end
+
def files_to_commit(snippet)
snippet_actions.to_commit_actions.presence || build_actions_from_params(snippet)
end
diff --git a/app/services/snippets/bulk_destroy_service.rb b/app/services/snippets/bulk_destroy_service.rb
index 6eab9fb320e..9c6e1c14051 100644
--- a/app/services/snippets/bulk_destroy_service.rb
+++ b/app/services/snippets/bulk_destroy_service.rb
@@ -14,10 +14,10 @@ module Snippets
@snippets = snippets
end
- def execute(options = {})
+ def execute(skip_authorization: false)
return ServiceResponse.success(message: 'No snippets found.') if snippets.empty?
- user_can_delete_snippets! unless options[:hard_delete]
+ user_can_delete_snippets! unless skip_authorization
attempt_delete_repositories!
snippets.destroy_all # rubocop: disable Cop/DestroyAll
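Editor's note: replacing the options hash with an explicit skip_authorization: keyword makes the contract visible at the call site and lets Ruby reject misspelled keys. A short runnable sketch of that effect; user_can_delete_snippets! is stubbed to raise.

# With a keyword argument, unknown keys fail fast, unlike `options[:hard_delete]`,
# which silently returned nil when the key was misspelled.
def execute(skip_authorization: false)
  user_can_delete_snippets! unless skip_authorization
  :destroyed
end

def user_can_delete_snippets!
  raise 'not allowed'
end

puts execute(skip_authorization: true) # => destroyed

begin
  execute(hard_delete: true)
rescue ArgumentError => e
  puts e.message # => unknown keyword: :hard_delete
end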
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index 6d3b63de9fd..e0bab4cd6ad 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -24,7 +24,8 @@ module Snippets
spammable: @snippet,
spam_params: spam_params,
user: current_user,
- action: :create
+ action: :create,
+ extra_features: { files: file_paths_to_commit }
).execute
if save_and_commit
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index 76d5063c337..067680f2abc 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -23,11 +23,14 @@ module Snippets
update_snippet_attributes(snippet)
+ files = snippet.all_files.map { |f| { path: f } } + file_paths_to_commit
+
Spam::SpamActionService.new(
spammable: snippet,
spam_params: spam_params,
user: current_user,
- action: :update
+ action: :update,
+ extra_features: { files: files }
).execute
if save_and_commit(snippet)
diff --git a/app/services/spam/spam_action_service.rb b/app/services/spam/spam_action_service.rb
index 4fa9c0e4993..9c52e9f0cd3 100644
--- a/app/services/spam/spam_action_service.rb
+++ b/app/services/spam/spam_action_service.rb
@@ -4,11 +4,12 @@ module Spam
class SpamActionService
include SpamConstants
- def initialize(spammable:, spam_params:, user:, action:)
+ def initialize(spammable:, spam_params:, user:, action:, extra_features: {})
@target = spammable
@spam_params = spam_params
@user = user
@action = action
+ @extra_features = extra_features
end
# rubocop:disable Metrics/AbcSize
@@ -40,7 +41,7 @@ module Spam
private
- attr_reader :user, :action, :target, :spam_params, :spam_log
+ attr_reader :user, :action, :target, :spam_params, :spam_log, :extra_features
##
# In order to proceed to the spam check process, the target must be
@@ -124,7 +125,9 @@ module Spam
SpamVerdictService.new(target: target,
user: user,
options: options,
- context: context)
+ context: context,
+ extra_features: extra_features
+ )
end
def noteable_type
diff --git a/app/services/spam/spam_constants.rb b/app/services/spam/spam_constants.rb
index d300525710c..9ac3bcf8a1d 100644
--- a/app/services/spam/spam_constants.rb
+++ b/app/services/spam/spam_constants.rb
@@ -2,6 +2,7 @@
module Spam
module SpamConstants
+ ERROR_TYPE = 'spamcheck'
BLOCK_USER = 'block'
DISALLOW = 'disallow'
CONDITIONAL_ALLOW = 'conditional_allow'
diff --git a/app/services/spam/spam_verdict_service.rb b/app/services/spam/spam_verdict_service.rb
index e73b2666c02..08634ec840c 100644
--- a/app/services/spam/spam_verdict_service.rb
+++ b/app/services/spam/spam_verdict_service.rb
@@ -5,11 +5,12 @@ module Spam
include AkismetMethods
include SpamConstants
- def initialize(user:, target:, options:, context: {})
+ def initialize(user:, target:, options:, context: {}, extra_features: {})
@target = target
@user = user
@options = options
@context = context
+ @extra_features = extra_features
end
def execute
@@ -61,7 +62,7 @@ module Spam
private
- attr_reader :user, :target, :options, :context
+ attr_reader :user, :target, :options, :context, :extra_features
def akismet_verdict
if akismet.spam?
@@ -75,7 +76,8 @@ module Spam
return unless Gitlab::CurrentSettings.spam_check_endpoint_enabled
begin
- result, attribs, _error = spamcheck_client.issue_spam?(spam_issue: target, user: user, context: context)
+ result, attribs, _error = spamcheck_client.spam?(spammable: target, user: user, context: context,
+ extra_features: extra_features)
# @TODO log if error is not nil https://gitlab.com/gitlab-org/gitlab/-/issues/329545
return [nil, attribs] unless result
@@ -83,7 +85,7 @@ module Spam
[result, attribs]
rescue StandardError => e
- Gitlab::ErrorTracking.log_exception(e)
+ Gitlab::ErrorTracking.log_exception(e, error: ERROR_TYPE)
# Default to ALLOW if any errors occur
[ALLOW, attribs, true]
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index 75903fde39e..7275a05d2ce 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -14,6 +14,13 @@ module SystemNotes
# See also the discussion in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60700#note_612724683
USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE = false
+ def self.issuable_events
+ {
+ review_requested: s_('IssuableEvents|requested review from'),
+ review_request_removed: s_('IssuableEvents|removed review request for')
+ }.freeze
+ end
+
#
# noteable_ref - Referenced noteable object
#
@@ -26,7 +33,7 @@ module SystemNotes
issuable_type = noteable.to_ability_name.humanize(capitalize: false)
body = "marked this #{issuable_type} as related to #{noteable_ref.to_reference(noteable.resource_parent)}"
- issue_activity_counter.track_issue_related_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_related_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'relate'))
end
@@ -42,7 +49,7 @@ module SystemNotes
def unrelate_issuable(noteable_ref)
body = "removed the relation with #{noteable_ref.to_reference(noteable.resource_parent)}"
- issue_activity_counter.track_issue_unrelated_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_unrelated_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'unrelate'))
end
@@ -61,7 +68,7 @@ module SystemNotes
def change_assignee(assignee)
body = assignee.nil? ? 'removed assignee' : "assigned to #{assignee.to_reference}"
- issue_activity_counter.track_issue_assignee_changed_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_assignee_changed_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'assignee'))
end
@@ -93,7 +100,7 @@ module SystemNotes
body = text_parts.join(' and ')
- issue_activity_counter.track_issue_assignee_changed_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_assignee_changed_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'assignee'))
end
@@ -115,8 +122,8 @@ module SystemNotes
text_parts = []
Gitlab::I18n.with_default_locale do
- text_parts << "requested review from #{added_users.map(&:to_reference).to_sentence}" if added_users.any?
- text_parts << "removed review request for #{unassigned_users.map(&:to_reference).to_sentence}" if unassigned_users.any?
+ text_parts << "#{self.class.issuable_events[:review_requested]} #{added_users.map(&:to_reference).to_sentence}" if added_users.any?
+ text_parts << "#{self.class.issuable_events[:review_request_removed]} #{unassigned_users.map(&:to_reference).to_sentence}" if unassigned_users.any?
end
body = text_parts.join(' and ')
@@ -172,7 +179,7 @@ module SystemNotes
body = "changed title from **#{marked_old_title}** to **#{marked_new_title}**"
- issue_activity_counter.track_issue_title_changed_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_title_changed_action)
work_item_activity_counter.track_work_item_title_changed_action(author: author) if noteable.is_a?(WorkItem)
create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
@@ -210,7 +217,7 @@ module SystemNotes
def change_description
body = 'changed the description'
- issue_activity_counter.track_issue_description_changed_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_description_changed_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'description'))
end
@@ -246,6 +253,7 @@ module SystemNotes
)
else
track_cross_reference_action
+
created_at = mentioner.created_at if USE_COMMIT_DATE_FOR_CROSS_REFERENCE_NOTE && mentioner.is_a?(Commit)
create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference', created_at: created_at))
end
@@ -280,7 +288,7 @@ module SystemNotes
status_label = new_task.complete? ? Taskable::COMPLETED : Taskable::INCOMPLETE
body = "marked the checklist item **#{new_task.source}** as #{status_label}"
- issue_activity_counter.track_issue_description_changed_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_description_changed_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'task'))
end
@@ -303,7 +311,7 @@ module SystemNotes
cross_reference = noteable_ref.to_reference(project)
body = "moved #{direction} #{cross_reference}"
- issue_activity_counter.track_issue_moved_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_moved_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'moved'))
end
@@ -327,9 +335,7 @@ module SystemNotes
cross_reference = noteable_ref.to_reference(project)
body = "cloned #{direction} #{cross_reference}"
- if noteable.is_a?(Issue) && direction == :to
- issue_activity_counter.track_issue_cloned_action(author: author, project: project)
- end
+ track_issue_event(:track_issue_cloned_action) if direction == :to
create_note(NoteSummary.new(noteable, project, author, body, action: 'cloned', created_at: created_at))
end
@@ -346,12 +352,12 @@ module SystemNotes
body = 'made the issue confidential'
action = 'confidential'
- issue_activity_counter.track_issue_made_confidential_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_made_confidential_action)
else
body = 'made the issue visible to everyone'
action = 'visible'
- issue_activity_counter.track_issue_made_visible_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_made_visible_action)
end
create_note(NoteSummary.new(noteable, project, author, body, action: action))
@@ -418,7 +424,7 @@ module SystemNotes
def mark_duplicate_issue(canonical_issue)
body = "marked this issue as a duplicate of #{canonical_issue.to_reference(project)}"
- issue_activity_counter.track_issue_marked_as_duplicate_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_marked_as_duplicate_action)
create_note(NoteSummary.new(noteable, project, author, body, action: 'duplicate'))
end
@@ -431,12 +437,10 @@ module SystemNotes
action = noteable.discussion_locked? ? 'locked' : 'unlocked'
body = "#{action} this #{noteable.class.to_s.titleize.downcase}"
- if noteable.is_a?(Issue)
- if action == 'locked'
- issue_activity_counter.track_issue_locked_action(author: author)
- else
- issue_activity_counter.track_issue_unlocked_action(author: author)
- end
+ if action == 'locked'
+ track_issue_event(:track_issue_locked_action)
+ else
+ track_issue_event(:track_issue_unlocked_action)
end
create_note(NoteSummary.new(noteable, project, author, body, action: action))
@@ -495,7 +499,7 @@ module SystemNotes
end
def track_cross_reference_action
- issue_activity_counter.track_issue_cross_referenced_action(author: author) if noteable.is_a?(Issue)
+ track_issue_event(:track_issue_cross_referenced_action)
end
def hierarchy_note_params(action, parent, child)
@@ -520,6 +524,12 @@ module SystemNotes
}
end
end
+
+ def track_issue_event(event_name)
+ return unless noteable.is_a?(Issue)
+
+ issue_activity_counter.public_send(event_name, author: author, project: project || noteable.project) # rubocop: disable GitlabSecurity/PublicSend
+ end
end
end
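Editor's note: the new track_issue_event helper above collapses the repeated "if noteable.is_a?(Issue)" guards into one method that dispatches on the event name. A self-contained sketch of the same guarded public_send dispatch, with a tiny counter class standing in for IssueActivityUniqueCounter.

# Stand-in counter; the real code calls
# Gitlab::UsageDataCounters::IssueActivityUniqueCounter.
class ToyCounter
  def track_issue_moved_action(author:, project:)
    puts "tracked move by #{author} in #{project}"
  end
end

Issue = Struct.new(:project)
MergeRequest = Struct.new(:project)

def track_issue_event(counter, noteable, event_name, author:)
  return unless noteable.is_a?(Issue) # one type guard instead of one per call site

  counter.public_send(event_name, author: author, project: noteable.project)
end

counter = ToyCounter.new
track_issue_event(counter, Issue.new('gitlab'), :track_issue_moved_action, author: 'alice')
track_issue_event(counter, MergeRequest.new('gitlab'), :track_issue_moved_action, author: 'alice') # no-op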
diff --git a/app/services/system_notes/time_tracking_service.rb b/app/services/system_notes/time_tracking_service.rb
index 68df52a03c7..c5bdbc6799e 100644
--- a/app/services/system_notes/time_tracking_service.rb
+++ b/app/services/system_notes/time_tracking_service.rb
@@ -21,7 +21,7 @@ module SystemNotes
# Using instance_of because WorkItem < Issue. We don't want to track work item updates as issue updates
if noteable.instance_of?(Issue) && changed_dates.key?('due_date')
- issue_activity_counter.track_issue_due_date_changed_action(author: author)
+ issue_activity_counter.track_issue_due_date_changed_action(author: author, project: project)
end
work_item_activity_counter.track_work_item_date_changed_action(author: author) if noteable.is_a?(WorkItem)
@@ -50,7 +50,9 @@ module SystemNotes
"changed time estimate to #{parsed_time}"
end
- issue_activity_counter.track_issue_time_estimate_changed_action(author: author) if noteable.is_a?(Issue)
+ if noteable.is_a?(Issue)
+ issue_activity_counter.track_issue_time_estimate_changed_action(author: author, project: project)
+ end
create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
end
@@ -81,7 +83,9 @@ module SystemNotes
body = text_parts.join(' ')
end
- issue_activity_counter.track_issue_time_spent_changed_action(author: author) if noteable.is_a?(Issue)
+ if noteable.is_a?(Issue)
+ issue_activity_counter.track_issue_time_spent_changed_action(author: author, project: project)
+ end
create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
end
@@ -107,7 +111,9 @@ module SystemNotes
text_parts << "at #{spent_at}" if spent_at && spent_at != DateTime.current.to_date
body = text_parts.join(' ')
- issue_activity_counter.track_issue_time_spent_changed_action(author: author) if noteable.is_a?(Issue)
+ if noteable.is_a?(Issue)
+ issue_activity_counter.track_issue_time_spent_changed_action(author: author, project: project)
+ end
create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
end
diff --git a/app/services/topics/merge_service.rb b/app/services/topics/merge_service.rb
index 0d256579fe0..58f3d5305b4 100644
--- a/app/services/topics/merge_service.rb
+++ b/app/services/topics/merge_service.rb
@@ -17,14 +17,21 @@ module Topics
refresh_target_topic_counters
delete_source_topic
end
+
+ ServiceResponse.success
+ rescue ArgumentError => e
+ ServiceResponse.error(message: e.message)
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_exception(e, source_topic_id: source_topic.id, target_topic_id: target_topic.id)
+ ServiceResponse.error(message: _('Topics could not be merged!'))
end
private
def validate_parameters!
- raise ArgumentError, 'The source topic is not a topic.' unless source_topic.is_a?(Projects::Topic)
- raise ArgumentError, 'The target topic is not a topic.' unless target_topic.is_a?(Projects::Topic)
- raise ArgumentError, 'The source topic and the target topic are identical.' if source_topic == target_topic
+ raise ArgumentError, _('The source topic is not a topic.') unless source_topic.is_a?(Projects::Topic)
+ raise ArgumentError, _('The target topic is not a topic.') unless target_topic.is_a?(Projects::Topic)
+ raise ArgumentError, _('The source topic and the target topic are identical.') if source_topic == target_topic
end
# rubocop: disable CodeReuse/ActiveRecord
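Editor's note: the execute method above now turns validation failures (ArgumentError) into user-facing error responses and unexpected failures into a tracked, generic error. A compact runnable sketch of that two-tier rescue; hashes stand in for ServiceResponse and a puts stands in for Gitlab::ErrorTracking.

# Validation errors surface their own message; anything else is reported
# generically so internal details never reach the user.
def merge_topics(source, target)
  raise ArgumentError, 'The source topic and the target topic are identical.' if source == target

  # ... transactional merge would happen here ...
  { status: :success }
rescue ArgumentError => e
  { status: :error, message: e.message }
rescue StandardError => e
  puts "tracked: #{e.class}" # stand-in for Gitlab::ErrorTracking.track_exception
  { status: :error, message: 'Topics could not be merged!' }
end

p merge_topics('ruby', 'ruby')
# => {:status=>:error, :message=>"The source topic and the target topic are identical."}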
diff --git a/app/services/users/authorized_build_service.rb b/app/services/users/authorized_build_service.rb
index eb2386198d3..5029105b087 100644
--- a/app/services/users/authorized_build_service.rb
+++ b/app/services/users/authorized_build_service.rb
@@ -16,3 +16,5 @@ module Users
end
end
end
+
+Users::AuthorizedBuildService.prepend_mod_with('Users::AuthorizedBuildService')
diff --git a/app/services/users/destroy_service.rb b/app/services/users/destroy_service.rb
index dfa9316889e..a378cb09854 100644
--- a/app/services/users/destroy_service.rb
+++ b/app/services/users/destroy_service.rb
@@ -23,6 +23,11 @@ module Users
# `hard_delete: true` implies `delete_solo_owned_groups: true`. To perform
# a hard deletion without destroying solo-owned groups, pass
# `delete_solo_owned_groups: false, hard_delete: true` in +options+.
+ #
+ # To make the service asynchronous, a new behaviour is being introduced
+ # behind the user_destroy_with_limited_execution_time_worker feature flag.
+ # Migrating the associated user records, and post-migration cleanup is
+ # handled by the Users::MigrateRecordsToGhostUserWorker cron worker.
def execute(user, options = {})
delete_solo_owned_groups = options.fetch(:delete_solo_owned_groups, options[:hard_delete])
@@ -35,12 +40,14 @@ module Users
return user
end
- # Calling all before/after_destroy hooks for the user because
- # there is no dependent: destroy in the relationship. And the removal
- # is done by a foreign_key. Otherwise they won't be called
- user.members.find_each { |member| member.run_callbacks(:destroy) }
+ user.block
+
+ # Load the records. Groups are unavailable after membership is destroyed.
+ solo_owned_groups = user.solo_owned_groups.load
+
+ user.members.each_batch { |batch| batch.destroy_all } # rubocop:disable Style/SymbolProc, Cop/DestroyAll
- user.solo_owned_groups.each do |group|
+ solo_owned_groups.each do |group|
Groups::DestroyService.new(group, current_user).execute
end
@@ -54,22 +61,32 @@ module Users
yield(user) if block_given?
- MigrateToGhostUserService.new(user).execute(hard_delete: options[:hard_delete])
+ hard_delete = options.fetch(:hard_delete, false)
- response = Snippets::BulkDestroyService.new(current_user, user.snippets).execute(options)
- raise DestroyError, response.message if response.error?
+ if Feature.enabled?(:user_destroy_with_limited_execution_time_worker)
+ Users::GhostUserMigration.create!(user: user,
+ initiator_user: current_user,
+ hard_delete: hard_delete)
- # Rails attempts to load all related records into memory before
- # destroying: https://github.com/rails/rails/issues/22510
- # This ensures we delete records in batches.
- user.destroy_dependent_associations_in_batches(exclude: [:snippets])
- user.nullify_dependent_associations_in_batches
+ else
+ MigrateToGhostUserService.new(user).execute(hard_delete: options[:hard_delete])
- # Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
- user_data = user.destroy
- namespace.destroy
+ response = Snippets::BulkDestroyService.new(current_user, user.snippets)
+ .execute(skip_authorization: hard_delete)
+ raise DestroyError, response.message if response.error?
- user_data
+ # Rails attempts to load all related records into memory before
+ # destroying: https://github.com/rails/rails/issues/22510
+ # This ensures we delete records in batches.
+ user.destroy_dependent_associations_in_batches(exclude: [:snippets])
+ user.nullify_dependent_associations_in_batches
+
+ # Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
+ user_data = user.destroy
+ namespace.destroy
+
+ user_data
+ end
end
end
end
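Editor's note: behind the user_destroy_with_limited_execution_time_worker flag, the service above only records a Users::GhostUserMigration row and leaves the heavy migration to a cron worker; with the flag off it keeps the synchronous path. A toy, runnable sketch of that flag-gated split, using an in-memory queue in place of the database table and worker.

# Flag on: enqueue a migration request and return immediately.
# Flag off: do the work inline. The array stands in for Users::GhostUserMigration.
MIGRATION_QUEUE = []

def destroy_user(user, hard_delete:, async_enabled:)
  if async_enabled
    MIGRATION_QUEUE << { user: user, hard_delete: hard_delete } # picked up later by a cron worker
  else
    puts "migrating #{user}'s records synchronously (hard_delete=#{hard_delete})"
  end
end

destroy_user('alice', hard_delete: false, async_enabled: true)
destroy_user('bob', hard_delete: true, async_enabled: false)
p MIGRATION_QUEUE # => [{:user=>"alice", :hard_delete=>false}]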
diff --git a/app/services/users/email_verification/base_service.rb b/app/services/users/email_verification/base_service.rb
new file mode 100644
index 00000000000..3337beec195
--- /dev/null
+++ b/app/services/users/email_verification/base_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Users
+ module EmailVerification
+ class BaseService
+ VALID_ATTRS = %i[unlock_token confirmation_token].freeze
+
+ def initialize(attr:)
+ @attr = attr
+
+ validate_attr!
+ end
+
+ protected
+
+ attr_reader :attr, :token
+
+ def validate_attr!
+ raise ArgumentError, 'Invalid attribute' unless attr.in?(VALID_ATTRS)
+ end
+
+ def digest
+ Devise.token_generator.digest(User, attr, token)
+ end
+ end
+ end
+end
diff --git a/app/services/users/email_verification/generate_token_service.rb b/app/services/users/email_verification/generate_token_service.rb
new file mode 100644
index 00000000000..6f0237ce244
--- /dev/null
+++ b/app/services/users/email_verification/generate_token_service.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Users
+ module EmailVerification
+ class GenerateTokenService < EmailVerification::BaseService
+ TOKEN_LENGTH = 6
+
+ def execute
+ @token = generate_token
+
+ [token, digest]
+ end
+
+ private
+
+ def generate_token
+ SecureRandom.random_number(10**TOKEN_LENGTH).to_s.rjust(TOKEN_LENGTH, '0')
+ end
+ end
+ end
+end
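Editor's note: GenerateTokenService above produces a fixed-length numeric code, a random number below 10**6 zero-padded to six digits. That half is plain Ruby and runs as-is; the digest half relies on Devise's token generator and is omitted here.

require 'securerandom'

TOKEN_LENGTH = 6

# Random integer in 0...10**6, left-padded so 42 becomes "000042".
def generate_token
  SecureRandom.random_number(10**TOKEN_LENGTH).to_s.rjust(TOKEN_LENGTH, '0')
end

puts generate_token # e.g. "093471", always exactly six digits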
diff --git a/app/services/users/email_verification/validate_token_service.rb b/app/services/users/email_verification/validate_token_service.rb
new file mode 100644
index 00000000000..b1b34e94f49
--- /dev/null
+++ b/app/services/users/email_verification/validate_token_service.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module Users
+ module EmailVerification
+ class ValidateTokenService < EmailVerification::BaseService
+ include ActionView::Helpers::DateHelper
+
+ TOKEN_VALID_FOR_MINUTES = 60
+
+ def initialize(attr:, user:, token:)
+ super(attr: attr)
+
+ @user = user
+ @token = token
+ end
+
+ def execute
+ return failure(:rate_limited) if verification_rate_limited?
+ return failure(:invalid) unless valid?
+ return failure(:expired) if expired_token?
+
+ success
+ end
+
+ private
+
+ attr_reader :user
+
+ def verification_rate_limited?
+ Gitlab::ApplicationRateLimiter.throttled?(:email_verification, scope: user[attr])
+ end
+
+ def valid?
+ return false unless token.present?
+
+ Devise.secure_compare(user[attr], digest)
+ end
+
+ def expired_token?
+ generated_at = case attr
+ when :unlock_token then user.locked_at
+ when :confirmation_token then user.confirmation_sent_at
+ end
+
+ generated_at < TOKEN_VALID_FOR_MINUTES.minutes.ago
+ end
+
+ def success
+ { status: :success }
+ end
+
+ def failure(reason)
+ {
+ status: :failure,
+ reason: reason,
+ message: failure_message(reason)
+ }
+ end
+
+ def failure_message(reason)
+ case reason
+ when :rate_limited
+ format(s_("IdentityVerification|You've reached the maximum amount of tries. "\
+ 'Wait %{interval} or send a new code and try again.'), interval: email_verification_interval)
+ when :expired
+ s_('IdentityVerification|The code has expired. Send a new code and try again.')
+ when :invalid
+ s_('IdentityVerification|The code is incorrect. Enter it again, or send a new code.')
+ end
+ end
+
+ def email_verification_interval
+ interval_in_seconds = Gitlab::ApplicationRateLimiter.rate_limits[:email_verification][:interval]
+ distance_of_time_in_words(interval_in_seconds)
+ end
+ end
+ end
+end
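Editor's note: ValidateTokenService above checks rate limiting, then a digest comparison, then a 60-minute expiry window. A standalone sketch of the last two checks; Digest::SHA256 stands in for Devise.token_generator.digest and Devise.secure_compare (which is keyed and constant-time), and the timestamps are plain Time objects.

require 'digest'

TOKEN_VALID_FOR_MINUTES = 60

def valid_token?(stored_digest, submitted_token, sent_at)
  return :invalid unless submitted_token &&
    stored_digest == Digest::SHA256.hexdigest(submitted_token) # simplified digest check
  return :expired if sent_at < Time.now - TOKEN_VALID_FOR_MINUTES * 60

  :success
end

digest = Digest::SHA256.hexdigest('123456')
puts valid_token?(digest, '123456', Time.now - 10 * 60) # => success
puts valid_token?(digest, '654321', Time.now)           # => invalid
puts valid_token?(digest, '123456', Time.now - 90 * 60) # => expired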
diff --git a/app/services/users/migrate_records_to_ghost_user_in_batches_service.rb b/app/services/users/migrate_records_to_ghost_user_in_batches_service.rb
new file mode 100644
index 00000000000..7c4a5698ea9
--- /dev/null
+++ b/app/services/users/migrate_records_to_ghost_user_in_batches_service.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Users
+ class MigrateRecordsToGhostUserInBatchesService
+ def initialize
+ @execution_tracker = Gitlab::Utils::ExecutionTracker.new
+ end
+
+ def execute
+ Users::GhostUserMigration.find_each do |user_to_migrate|
+ break if execution_tracker.over_limit?
+
+ service = Users::MigrateRecordsToGhostUserService.new(user_to_migrate.user,
+ user_to_migrate.initiator_user,
+ execution_tracker)
+ service.execute(hard_delete: user_to_migrate.hard_delete)
+ end
+ rescue Gitlab::Utils::ExecutionTracker::ExecutionTimeOutError
+ # no-op
+ end
+
+ private
+
+ attr_reader :execution_tracker
+ end
+end
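Editor's note: the batch service above processes queued ghost-user migrations until an execution tracker says the time budget is spent, then stops quietly so the next cron run can continue. A self-contained sketch of that time-boxed loop, with a simple monotonic-clock tracker standing in for Gitlab::Utils::ExecutionTracker.

# Minimal stand-in for the execution tracker: over_limit? flips once the
# elapsed monotonic time exceeds the budget.
class ToyTracker
  def initialize(budget_seconds)
    @deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + budget_seconds
  end

  def over_limit?
    Process.clock_gettime(Process::CLOCK_MONOTONIC) > @deadline
  end
end

def process_queue(queue, tracker)
  queue.each do |job|
    break if tracker.over_limit? # stop early; the next run picks up the rest

    puts "migrating records for #{job}"
    sleep 0.01
  end
end

process_queue(%w[alice bob carol], ToyTracker.new(0.025))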
diff --git a/app/services/users/migrate_records_to_ghost_user_service.rb b/app/services/users/migrate_records_to_ghost_user_service.rb
new file mode 100644
index 00000000000..2d92aaed7da
--- /dev/null
+++ b/app/services/users/migrate_records_to_ghost_user_service.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+# When a user is destroyed, some of their associated records are
+# moved to a "Ghost User", to prevent these associated records from
+# being destroyed.
+#
+# For example, all the issues/MRs a user has created are _not_ destroyed
+# when the user is destroyed.
+module Users
+ class MigrateRecordsToGhostUserService
+ extend ActiveSupport::Concern
+
+ DestroyError = Class.new(StandardError)
+
+ attr_reader :ghost_user, :user, :initiator_user, :hard_delete
+
+ def initialize(user, initiator_user, execution_tracker)
+ @user = user
+ @initiator_user = initiator_user
+ @execution_tracker = execution_tracker
+ @ghost_user = User.ghost
+ end
+
+ def execute(hard_delete: false)
+ @hard_delete = hard_delete
+
+ migrate_records
+ post_migrate_records
+ end
+
+ private
+
+ attr_reader :execution_tracker
+
+ def migrate_records
+ return if hard_delete
+
+ migrate_issues
+ migrate_merge_requests
+ migrate_notes
+ migrate_abuse_reports
+ migrate_award_emoji
+ migrate_snippets
+ migrate_reviews
+ end
+
+ def post_migrate_records
+ delete_snippets
+
+ # Rails attempts to load all related records into memory before
+ # destroying: https://github.com/rails/rails/issues/22510
+ # This ensures we delete records in batches.
+ user.destroy_dependent_associations_in_batches(exclude: [:snippets])
+ user.nullify_dependent_associations_in_batches
+
+ # Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
+ user_data = user.destroy
+ user.namespace.destroy
+
+ user_data
+ end
+
+ def delete_snippets
+ response = Snippets::BulkDestroyService.new(initiator_user, user.snippets).execute(skip_authorization: true)
+ raise DestroyError, response.message if response.error?
+ end
+
+ def migrate_issues
+ batched_migrate(Issue, :author_id)
+ batched_migrate(Issue, :last_edited_by_id)
+ end
+
+ def migrate_merge_requests
+ batched_migrate(MergeRequest, :author_id)
+ batched_migrate(MergeRequest, :merge_user_id)
+ end
+
+ def migrate_notes
+ batched_migrate(Note, :author_id)
+ end
+
+ def migrate_abuse_reports
+ user.reported_abuse_reports.update_all(reporter_id: ghost_user.id)
+ end
+
+ def migrate_award_emoji
+ user.award_emoji.update_all(user_id: ghost_user.id)
+ end
+
+ def migrate_snippets
+ snippets = user.snippets.only_project_snippets
+ snippets.update_all(author_id: ghost_user.id)
+ end
+
+ def migrate_reviews
+ batched_migrate(Review, :author_id)
+ end
+
+ # rubocop:disable CodeReuse/ActiveRecord
+ def batched_migrate(base_scope, column, batch_size: 50)
+ loop do
+ update_count = base_scope.where(column => user.id).limit(batch_size).update_all(column => ghost_user.id)
+ break if update_count == 0
+ raise Gitlab::Utils::ExecutionTracker::ExecutionTimeOutError if execution_tracker.over_limit?
+ end
+ end
+ # rubocop:enable CodeReuse/ActiveRecord
+ end
+end
+
+Users::MigrateRecordsToGhostUserService.prepend_mod_with('Users::MigrateRecordsToGhostUserService')