Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-03-16 21:18:33 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-03-16 21:18:33 +0300
commitf64a639bcfa1fc2bc89ca7db268f594306edfd7c (patch)
treea2c3c2ebcc3b45e596949db485d6ed18ffaacfa1 /app/services
parentbfbc3e0d6583ea1a91f627528bedc3d65ba4b10f (diff)
Add latest changes from gitlab-org/gitlab@13-10-stable-eev13.10.0-rc40
Diffstat (limited to 'app/services')
-rw-r--r--app/services/alert_management/create_alert_issue_service.rb11
-rw-r--r--app/services/authorized_project_update/periodic_recalculate_service.rb4
-rw-r--r--app/services/boards/base_item_move_service.rb72
-rw-r--r--app/services/boards/base_items_list_service.rb16
-rw-r--r--app/services/boards/issues/list_service.rb16
-rw-r--r--app/services/boards/issues/move_service.rb75
-rw-r--r--app/services/boards/list_service.rb32
-rw-r--r--app/services/boards/lists/list_service.rb21
-rw-r--r--app/services/boards/lists/update_service.rb4
-rw-r--r--app/services/boards/update_service.rb11
-rw-r--r--app/services/bulk_import_service.rb7
-rw-r--r--app/services/ci/build_report_result_service.rb3
-rw-r--r--app/services/ci/create_downstream_pipeline_service.rb2
-rw-r--r--app/services/ci/create_pipeline_service.rb4
-rw-r--r--app/services/ci/destroy_expired_job_artifacts_service.rb49
-rw-r--r--app/services/ci/expire_pipeline_cache_service.rb33
-rw-r--r--app/services/ci/job_artifacts_destroy_batch_service.rb72
-rw-r--r--app/services/ci/pipeline_processing/atomic_processing_service.rb4
-rw-r--r--app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb2
-rw-r--r--app/services/ci/process_pipeline_service.rb2
-rw-r--r--app/services/ci/register_job_service.rb193
-rw-r--r--app/services/ci/retry_build_service.rb4
-rw-r--r--app/services/ci/retry_pipeline_service.rb2
-rw-r--r--app/services/ci/update_build_queue_service.rb11
-rw-r--r--app/services/clusters/kubernetes.rb2
-rw-r--r--app/services/clusters/kubernetes/create_or_update_service_account_service.rb32
-rw-r--r--app/services/concerns/alert_management/alert_processing.rb19
-rw-r--r--app/services/dependency_proxy/find_or_create_manifest_service.rb7
-rw-r--r--app/services/dependency_proxy/head_manifest_service.rb6
-rw-r--r--app/services/dependency_proxy/pull_manifest_service.rb4
-rw-r--r--app/services/deployments/older_deployments_drop_service.rb2
-rw-r--r--app/services/deployments/update_environment_service.rb21
-rw-r--r--app/services/environments/schedule_to_delete_review_apps_service.rb102
-rw-r--r--app/services/groups/create_service.rb2
-rw-r--r--app/services/groups/destroy_service.rb4
-rw-r--r--app/services/groups/group_links/create_service.rb2
-rw-r--r--app/services/groups/group_links/destroy_service.rb2
-rw-r--r--app/services/groups/group_links/update_service.rb2
-rw-r--r--app/services/import/github_service.rb34
-rw-r--r--app/services/issuable/clone/base_service.rb11
-rw-r--r--app/services/issuable/process_assignees.rb36
-rw-r--r--app/services/issue_rebalancing_service.rb5
-rw-r--r--app/services/issues/clone_service.rb1
-rw-r--r--app/services/issues/create_service.rb1
-rw-r--r--app/services/issues/move_service.rb15
-rw-r--r--app/services/jira_import/users_importer.rb2
-rw-r--r--app/services/members/invite_service.rb97
-rw-r--r--app/services/merge_requests/after_create_service.rb10
-rw-r--r--app/services/merge_requests/base_service.rb2
-rw-r--r--app/services/merge_requests/build_service.rb16
-rw-r--r--app/services/merge_requests/merge_service.rb3
-rw-r--r--app/services/merge_requests/post_merge_service.rb29
-rw-r--r--app/services/merge_requests/refresh_service.rb13
-rw-r--r--app/services/merge_requests/retarget_chain_service.rb34
-rw-r--r--app/services/merge_requests/update_service.rb191
-rw-r--r--app/services/namespaces/in_product_marketing_emails_service.rb5
-rw-r--r--app/services/notes/build_service.rb30
-rw-r--r--app/services/notes/update_service.rb16
-rw-r--r--app/services/notification_service.rb65
-rw-r--r--app/services/onboarding_progress_service.rb18
-rw-r--r--app/services/packages/composer/create_package_service.rb2
-rw-r--r--app/services/packages/conan/search_service.rb2
-rw-r--r--app/services/packages/create_event_service.rb14
-rw-r--r--app/services/packages/create_temporary_package_service.rb21
-rw-r--r--app/services/packages/debian/find_or_create_incoming_service.rb (renamed from app/services/packages/debian/get_or_create_incoming_service.rb)2
-rw-r--r--app/services/packages/debian/find_or_create_package_service.rb33
-rw-r--r--app/services/packages/maven/find_or_create_package_service.rb3
-rw-r--r--app/services/packages/maven/metadata.rb13
-rw-r--r--app/services/packages/maven/metadata/append_package_file_service.rb88
-rw-r--r--app/services/packages/maven/metadata/base_create_xml_service.rb32
-rw-r--r--app/services/packages/maven/metadata/create_plugins_xml_service.rb92
-rw-r--r--app/services/packages/maven/metadata/create_versions_xml_service.rb165
-rw-r--r--app/services/packages/maven/metadata/sync_service.rb123
-rw-r--r--app/services/packages/nuget/create_package_service.rb23
-rw-r--r--app/services/packages/nuget/update_package_from_metadata_service.rb3
-rw-r--r--app/services/packages/rubygems/dependency_resolver_service.rb43
-rw-r--r--app/services/pages/legacy_storage_lease.rb9
-rw-r--r--app/services/projects/autocomplete_service.rb2
-rw-r--r--app/services/projects/create_service.rb2
-rw-r--r--app/services/projects/destroy_service.rb7
-rw-r--r--app/services/projects/group_links/create_service.rb2
-rw-r--r--app/services/projects/schedule_bulk_repository_shard_moves_service.rb2
-rw-r--r--app/services/projects/update_pages_configuration_service.rb2
-rw-r--r--app/services/projects/update_pages_service.rb1
-rw-r--r--app/services/protected_branches/api_service.rb5
-rw-r--r--app/services/repositories/changelog_service.rb6
-rw-r--r--app/services/security/vulnerability_uuid.rb9
-rw-r--r--app/services/snippets/schedule_bulk_repository_shard_moves_service.rb2
-rw-r--r--app/services/spam/spam_action_service.rb8
-rw-r--r--app/services/system_hooks_service.rb32
-rw-r--r--app/services/system_note_service.rb10
-rw-r--r--app/services/system_notes/alert_management_service.rb15
-rw-r--r--app/services/system_notes/issuables_service.rb8
-rw-r--r--app/services/system_notes/merge_requests_service.rb4
-rw-r--r--app/services/terraform/remote_state_handler.rb2
-rw-r--r--app/services/users/build_service.rb3
-rw-r--r--app/services/users/dismiss_user_callout_service.rb11
-rw-r--r--app/services/users/refresh_authorized_projects_service.rb13
98 files changed, 1723 insertions, 582 deletions
diff --git a/app/services/alert_management/create_alert_issue_service.rb b/app/services/alert_management/create_alert_issue_service.rb
index 58c7402c6c1..a81c2380dad 100644
--- a/app/services/alert_management/create_alert_issue_service.rb
+++ b/app/services/alert_management/create_alert_issue_service.rb
@@ -4,6 +4,9 @@ module AlertManagement
class CreateAlertIssueService
include Gitlab::Utils::StrongMemoize
+ DEFAULT_ALERT_TITLE = ::Gitlab::AlertManagement::Payload::Generic::DEFAULT_TITLE
+ DEFAULT_INCIDENT_TITLE = 'New: Incident'
+
# @param alert [AlertManagement::Alert]
# @param user [User]
def initialize(alert, user)
@@ -21,6 +24,8 @@ module AlertManagement
issue = result.payload[:issue]
return error(object_errors(alert), issue) unless associate_alert_with_issue(issue)
+ update_title_for(issue)
+
SystemNoteService.new_alert_issue(alert, issue, user)
result
@@ -50,6 +55,12 @@ module AlertManagement
alert.update(issue_id: issue.id)
end
+ def update_title_for(issue)
+ return unless issue.title == DEFAULT_ALERT_TITLE
+
+ issue.update!(title: "#{DEFAULT_INCIDENT_TITLE} #{issue.iid}")
+ end
+
def error(message, issue = nil)
ServiceResponse.error(payload: { issue: issue }, message: message)
end
diff --git a/app/services/authorized_project_update/periodic_recalculate_service.rb b/app/services/authorized_project_update/periodic_recalculate_service.rb
index 91c0f50e5e0..662d10c648b 100644
--- a/app/services/authorized_project_update/periodic_recalculate_service.rb
+++ b/app/services/authorized_project_update/periodic_recalculate_service.rb
@@ -2,8 +2,8 @@
module AuthorizedProjectUpdate
class PeriodicRecalculateService
- BATCH_SIZE = 480
- DELAY_INTERVAL = 30.seconds.to_i
+ BATCH_SIZE = 450
+ DELAY_INTERVAL = 50.seconds.to_i
def execute
# Using this approach (instead of eg. User.each_batch) keeps the arguments
diff --git a/app/services/boards/base_item_move_service.rb b/app/services/boards/base_item_move_service.rb
new file mode 100644
index 00000000000..bf3e29df54b
--- /dev/null
+++ b/app/services/boards/base_item_move_service.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module Boards
+ class BaseItemMoveService < Boards::BaseService
+ def execute(issuable)
+ issuable_modification_params = issuable_params(issuable)
+ return false if issuable_modification_params.empty?
+
+ move_single_issuable(issuable, issuable_modification_params)
+ end
+
+ private
+
+ def issuable_params(issuable)
+ attrs = {}
+
+ if move_between_lists?
+ attrs.merge!(
+ add_label_ids: add_label_ids,
+ remove_label_ids: remove_label_ids,
+ state_event: issuable_state
+ )
+ end
+
+ attrs
+ end
+
+ def move_single_issuable(issuable, issuable_modification_params)
+ ability_name = :"admin_#{issuable.to_ability_name}"
+ return unless can?(current_user, ability_name, issuable)
+
+ update(issuable, issuable_modification_params)
+ end
+
+ def move_between_lists?
+ moving_from_list.present? && moving_to_list.present? &&
+ moving_from_list != moving_to_list
+ end
+
+ def moving_from_list
+ return unless params[:from_list_id].present?
+
+ @moving_from_list ||= board.lists.id_in(params[:from_list_id]).first
+ end
+
+ def moving_to_list
+ return unless params[:to_list_id].present?
+
+ @moving_to_list ||= board.lists.id_in(params[:to_list_id]).first
+ end
+
+ def issuable_state
+ return 'reopen' if moving_from_list.closed?
+ return 'close' if moving_to_list.closed?
+ end
+
+ def add_label_ids
+ [moving_to_list.label_id].compact
+ end
+
+ def remove_label_ids
+ label_ids =
+ if moving_to_list.movable?
+ moving_from_list.label_id
+ else
+ ::Label.ids_on_board(board.id)
+ end
+
+ Array(label_ids).compact
+ end
+ end
+end
diff --git a/app/services/boards/base_items_list_service.rb b/app/services/boards/base_items_list_service.rb
index 851120ef597..5aebf216460 100644
--- a/app/services/boards/base_items_list_service.rb
+++ b/app/services/boards/base_items_list_service.rb
@@ -11,8 +11,24 @@ module Boards
ordered_items
end
+ # rubocop: disable CodeReuse/ActiveRecord
+ def metadata
+ issuables = item_model.arel_table
+ keys = metadata_fields.keys
+ # TODO: eliminate need for SQL literal fragment
+ columns = Arel.sql(metadata_fields.values_at(*keys).join(', '))
+ results = item_model.where(id: items.select(issuables[:id])).pluck(columns)
+
+ Hash[keys.zip(results.flatten)]
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
private
+ def metadata_fields
+ { size: 'COUNT(*)' }
+ end
+
def ordered_items
raise NotImplementedError
end
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 27d59e052c7..c6855f29af0 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -9,18 +9,6 @@ module Boards
IssuesFinder.valid_params
end
- # rubocop: disable CodeReuse/ActiveRecord
- def metadata
- issues = Issue.arel_table
- keys = metadata_fields.keys
- # TODO: eliminate need for SQL literal fragment
- columns = Arel.sql(metadata_fields.values_at(*keys).join(', '))
- results = Issue.where(id: items.select(issues[:id])).pluck(columns)
-
- Hash[keys.zip(results.flatten)]
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
private
def ordered_items
@@ -35,10 +23,6 @@ module Boards
@board ||= parent.boards.find(params[:board_id])
end
- def metadata_fields
- { size: 'COUNT(*)' }
- end
-
def filter_params
set_scope
set_non_archived
diff --git a/app/services/boards/issues/move_service.rb b/app/services/boards/issues/move_service.rb
index 56a7e228b10..99374fa01ae 100644
--- a/app/services/boards/issues/move_service.rb
+++ b/app/services/boards/issues/move_service.rb
@@ -2,13 +2,8 @@
module Boards
module Issues
- class MoveService < Boards::BaseService
- def execute(issue)
- issue_modification_params = issue_params(issue)
- return false if issue_modification_params.empty?
-
- move_single_issue(issue, issue_modification_params)
- end
+ class MoveService < Boards::BaseItemMoveService
+ extend ::Gitlab::Utils::Override
def execute_multiple(issues)
return execute_multiple_empty_result if issues.empty?
@@ -16,7 +11,7 @@ module Boards
handled_issues = []
last_inserted_issue_id = nil
count = issues.each.inject(0) do |moved_count, issue|
- issue_modification_params = issue_params(issue)
+ issue_modification_params = issuable_params(issue)
next moved_count if issue_modification_params.empty?
if last_inserted_issue_id
@@ -24,7 +19,7 @@ module Boards
end
last_inserted_issue_id = issue.id
- handled_issue = move_single_issue(issue, issue_modification_params)
+ handled_issue = move_single_issuable(issue, issue_modification_params)
handled_issues << present_issue_entity(handled_issue) if handled_issue
handled_issue && handled_issue.valid? ? moved_count + 1 : moved_count
end
@@ -54,51 +49,17 @@ module Boards
move_between_ids({ move_after_id: nil, move_before_id: id })
end
- def move_single_issue(issue, issue_modification_params)
- return unless can?(current_user, :update_issue, issue)
-
- update(issue, issue_modification_params)
- end
-
def board
@board ||= parent.boards.find(params[:board_id])
end
- def move_between_lists?
- moving_from_list.present? && moving_to_list.present? &&
- moving_from_list != moving_to_list
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def moving_from_list
- return unless params[:from_list_id].present?
-
- @moving_from_list ||= board.lists.find_by(id: params[:from_list_id])
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def moving_to_list
- return unless params[:to_list_id].present?
-
- @moving_to_list ||= board.lists.find_by(id: params[:to_list_id])
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
def update(issue, issue_modification_params)
::Issues::UpdateService.new(issue.project, current_user, issue_modification_params).execute(issue)
end
- def issue_params(issue)
- attrs = {}
-
- if move_between_lists?
- attrs.merge!(
- add_label_ids: add_label_ids,
- remove_label_ids: remove_label_ids,
- state_event: issue_state
- )
- end
+ override :issuable_params
+ def issuable_params(issuable)
+ attrs = super
move_between_ids = move_between_ids(params)
if move_between_ids
@@ -109,28 +70,6 @@ module Boards
attrs
end
- def issue_state
- return 'reopen' if moving_from_list.closed?
- return 'close' if moving_to_list.closed?
- end
-
- def add_label_ids
- [moving_to_list.label_id].compact
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def remove_label_ids
- label_ids =
- if moving_to_list.movable?
- moving_from_list.label_id
- else
- ::Label.on_board(board.id).pluck(:label_id)
- end
-
- Array(label_ids).compact
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
def move_between_ids(move_params)
ids = [move_params[:move_after_id], move_params[:move_before_id]]
.map(&:to_i)
diff --git a/app/services/boards/list_service.rb b/app/services/boards/list_service.rb
deleted file mode 100644
index 80ceb91f56d..00000000000
--- a/app/services/boards/list_service.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-module Boards
- class ListService < Boards::BaseService
- def execute
- find_boards
- end
-
- private
-
- def boards
- parent.boards.order_by_name_asc
- end
-
- def first_board
- parent.boards.first_board
- end
-
- def find_boards
- found =
- if parent.multiple_issue_boards_available?
- boards
- else
- # When multiple issue boards are not available
- # a user is only allowed to view the default shown board
- first_board
- end
-
- params[:board_id].present? ? [found.find(params[:board_id])] : found
- end
- end
-end
diff --git a/app/services/boards/lists/list_service.rb b/app/services/boards/lists/list_service.rb
index e4c789c4597..3c296cde51e 100644
--- a/app/services/boards/lists/list_service.rb
+++ b/app/services/boards/lists/list_service.rb
@@ -9,7 +9,26 @@ module Boards
end
lists = board.lists.preload_associated_models
- params[:list_id].present? ? lists.where(id: params[:list_id]) : lists # rubocop: disable CodeReuse/ActiveRecord
+
+ return lists.id_in(params[:list_id]) if params[:list_id].present?
+
+ list_types = unavailable_list_types_for(board)
+ lists.without_types(list_types)
+ end
+
+ private
+
+ def unavailable_list_types_for(board)
+ hidden_lists_for(board)
+ end
+
+ def hidden_lists_for(board)
+ hidden = []
+
+ hidden << ::List.list_types[:backlog] if board.hide_backlog_list
+ hidden << ::List.list_types[:closed] if board.hide_closed_list
+
+ hidden
end
end
end
diff --git a/app/services/boards/lists/update_service.rb b/app/services/boards/lists/update_service.rb
index 4a463372c82..e2d9c371ca2 100644
--- a/app/services/boards/lists/update_service.rb
+++ b/app/services/boards/lists/update_service.rb
@@ -47,11 +47,11 @@ module Boards
end
def can_read?(list)
- Ability.allowed?(current_user, :read_list, parent)
+ Ability.allowed?(current_user, :read_issue_board_list, parent)
end
def can_admin?(list)
- Ability.allowed?(current_user, :admin_list, parent)
+ Ability.allowed?(current_user, :admin_issue_board_list, parent)
end
end
end
diff --git a/app/services/boards/update_service.rb b/app/services/boards/update_service.rb
index 0340836fd78..48c6e44d55e 100644
--- a/app/services/boards/update_service.rb
+++ b/app/services/boards/update_service.rb
@@ -2,9 +2,20 @@
module Boards
class UpdateService < Boards::BaseService
+ PERMITTED_PARAMS = %i(name hide_backlog_list hide_closed_list).freeze
+
def execute(board)
+ filter_params
board.update(params)
end
+
+ def filter_params
+ params.slice!(*permitted_params)
+ end
+
+ def permitted_params
+ PERMITTED_PARAMS
+ end
end
end
diff --git a/app/services/bulk_import_service.rb b/app/services/bulk_import_service.rb
index 29439a79afe..4e13e967dbd 100644
--- a/app/services/bulk_import_service.rb
+++ b/app/services/bulk_import_service.rb
@@ -39,7 +39,12 @@ class BulkImportService
BulkImportWorker.perform_async(bulk_import.id)
- bulk_import
+ ServiceResponse.success(payload: bulk_import)
+ rescue ActiveRecord::RecordInvalid => e
+ ServiceResponse.error(
+ message: e.message,
+ http_status: :unprocessable_entity
+ )
end
private
diff --git a/app/services/ci/build_report_result_service.rb b/app/services/ci/build_report_result_service.rb
index f138aa91236..8bdb51320f9 100644
--- a/app/services/ci/build_report_result_service.rb
+++ b/app/services/ci/build_report_result_service.rb
@@ -33,7 +33,8 @@ module Ci
failed: test_suite.failed_count,
errored: test_suite.error_count,
skipped: test_suite.skipped_count,
- success: test_suite.success_count
+ success: test_suite.success_count,
+ suite_error: test_suite.suite_error
}
}
end
diff --git a/app/services/ci/create_downstream_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index 629d85b041f..93f0338fcba 100644
--- a/app/services/ci/create_downstream_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -43,7 +43,7 @@ module Ci
private
def update_bridge_status!(bridge, pipeline)
- Gitlab::OptimisticLocking.retry_lock(bridge) do |subject|
+ Gitlab::OptimisticLocking.retry_lock(bridge, name: 'create_downstream_pipeline_update_bridge_status') do |subject|
if pipeline.created_successfully?
# If bridge uses `strategy:depend` we leave it running
# and update the status when the downstream pipeline completes.
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index dc42411dfa1..0fd47e625fd 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -122,7 +122,9 @@ module Ci
end
def record_conversion_event
- Experiments::RecordConversionEventWorker.perform_async(:ci_syntax_templates, current_user.id)
+ return unless project.namespace.recent?
+
+ Experiments::RecordConversionEventWorker.perform_async(:ci_syntax_templates_b, current_user.id)
end
def create_namespace_onboarding_action
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index 7d8a3c17abe..d91cfb3cc82 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -4,7 +4,6 @@ module Ci
class DestroyExpiredJobArtifactsService
include ::Gitlab::ExclusiveLeaseHelpers
include ::Gitlab::LoopHelpers
- include ::Gitlab::Utils::StrongMemoize
BATCH_SIZE = 100
LOOP_TIMEOUT = 5.minutes
@@ -34,50 +33,20 @@ module Ci
def destroy_job_artifacts_with_slow_iteration(start_at)
Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
- artifacts = relation.unlocked.with_destroy_preloads.to_a
+ # For performance reasons, join with ci_pipelines after the batch is queried.
+ # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
+ artifacts = relation.unlocked
+
+ service_response = destroy_batch_async(artifacts)
+ @removed_artifacts_count += service_response[:destroyed_artifacts_count]
- parallel_destroy_batch(artifacts) if artifacts.any?
break if loop_timeout?(start_at)
break if index >= LOOP_LIMIT
end
end
- def parallel_destroy_batch(job_artifacts)
- Ci::DeletedObject.transaction do
- Ci::DeletedObject.bulk_import(job_artifacts)
- Ci::JobArtifact.id_in(job_artifacts.map(&:id)).delete_all
- destroy_related_records_for(job_artifacts)
- end
-
- # This is executed outside of the transaction because it depends on Redis
- update_project_statistics_for(job_artifacts)
- increment_monitoring_statistics(job_artifacts.size)
- end
-
- # This method is implemented in EE and it must do only database work
- def destroy_related_records_for(job_artifacts); end
-
- def update_project_statistics_for(job_artifacts)
- artifacts_by_project = job_artifacts.group_by(&:project)
- artifacts_by_project.each do |project, artifacts|
- delta = -artifacts.sum { |artifact| artifact.size.to_i }
- ProjectStatistics.increment_statistic(
- project, Ci::JobArtifact.project_statistics_name, delta)
- end
- end
-
- def increment_monitoring_statistics(size)
- destroyed_artifacts_counter.increment({}, size)
- @removed_artifacts_count += size
- end
-
- def destroyed_artifacts_counter
- strong_memoize(:destroyed_artifacts_counter) do
- name = :destroyed_job_artifacts_count_total
- comment = 'Counter of destroyed expired job artifacts'
-
- ::Gitlab::Metrics.counter(name, comment)
- end
+ def destroy_batch_async(artifacts)
+ Ci::JobArtifactsDestroyBatchService.new(artifacts).execute
end
def loop_timeout?(start_at)
@@ -85,5 +54,3 @@ module Ci
end
end
end
-
-Ci::DestroyExpiredJobArtifactsService.prepend_if_ee('EE::Ci::DestroyExpiredJobArtifactsService')
diff --git a/app/services/ci/expire_pipeline_cache_service.rb b/app/services/ci/expire_pipeline_cache_service.rb
index 8343e0f8cd0..2ae60907dab 100644
--- a/app/services/ci/expire_pipeline_cache_service.rb
+++ b/app/services/ci/expire_pipeline_cache_service.rb
@@ -2,6 +2,11 @@
module Ci
class ExpirePipelineCacheService
+ class UrlHelpers
+ include ::Gitlab::Routing
+ include ::GitlabRoutingHelper
+ end
+
def execute(pipeline, delete: false)
store = Gitlab::EtagCaching::Store.new
@@ -17,27 +22,27 @@ module Ci
private
def project_pipelines_path(project)
- Gitlab::Routing.url_helpers.project_pipelines_path(project, format: :json)
+ url_helpers.project_pipelines_path(project, format: :json)
end
def project_pipeline_path(project, pipeline)
- Gitlab::Routing.url_helpers.project_pipeline_path(project, pipeline, format: :json)
+ url_helpers.project_pipeline_path(project, pipeline, format: :json)
end
def commit_pipelines_path(project, commit)
- Gitlab::Routing.url_helpers.pipelines_project_commit_path(project, commit.id, format: :json)
+ url_helpers.pipelines_project_commit_path(project, commit.id, format: :json)
end
def new_merge_request_pipelines_path(project)
- Gitlab::Routing.url_helpers.project_new_merge_request_path(project, format: :json)
+ url_helpers.project_new_merge_request_path(project, format: :json)
end
def pipelines_project_merge_request_path(merge_request)
- Gitlab::Routing.url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
+ url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
end
def merge_request_widget_path(merge_request)
- Gitlab::Routing.url_helpers.cached_widget_project_json_merge_request_path(merge_request.project, merge_request, format: :json)
+ url_helpers.cached_widget_project_json_merge_request_path(merge_request.project, merge_request, format: :json)
end
def each_pipelines_merge_request_path(pipeline)
@@ -47,6 +52,10 @@ module Ci
end
end
+ def graphql_pipeline_path(pipeline)
+ url_helpers.graphql_etag_pipeline_path(pipeline)
+ end
+
# Updates ETag caches of a pipeline.
#
# This logic resides in a separate method so that EE can more easily extend
@@ -58,14 +67,20 @@ module Ci
project = pipeline.project
store.touch(project_pipelines_path(project))
- store.touch(project_pipeline_path(project, pipeline))
store.touch(commit_pipelines_path(project, pipeline.commit)) unless pipeline.commit.nil?
store.touch(new_merge_request_pipelines_path(project))
each_pipelines_merge_request_path(pipeline) do |path|
store.touch(path)
end
+
+ pipeline.self_with_ancestors_and_descendants.each do |relative_pipeline|
+ store.touch(project_pipeline_path(relative_pipeline.project, relative_pipeline))
+ store.touch(graphql_pipeline_path(relative_pipeline))
+ end
+ end
+
+ def url_helpers
+ @url_helpers ||= UrlHelpers.new
end
end
end
-
-Ci::ExpirePipelineCacheService.prepend_if_ee('EE::Ci::ExpirePipelineCacheService')
diff --git a/app/services/ci/job_artifacts_destroy_batch_service.rb b/app/services/ci/job_artifacts_destroy_batch_service.rb
new file mode 100644
index 00000000000..f8ece27fe86
--- /dev/null
+++ b/app/services/ci/job_artifacts_destroy_batch_service.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module Ci
+ class JobArtifactsDestroyBatchService
+ include BaseServiceUtility
+ include ::Gitlab::Utils::StrongMemoize
+
+ # Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
+ # Not for use outsie of the ci namespace
+
+ # Adds the passed batch of job artifacts to the `ci_deleted_objects` table
+ # for asyncronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
+ # and then deletes the batch of related `ci_job_artifacts` records.
+ # Params:
+ # +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
+ # +pick_up_at+:: When to pick up for deletion of files
+ # Returns:
+ # +Hash+:: A hash with status and destroyed_artifacts_count keys
+ def initialize(job_artifacts, pick_up_at: nil)
+ @job_artifacts = job_artifacts.with_destroy_preloads.to_a
+ @pick_up_at = pick_up_at
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def execute
+ return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?
+
+ Ci::DeletedObject.transaction do
+ Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
+ Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
+ destroy_related_records(@job_artifacts)
+ end
+
+ # This is executed outside of the transaction because it depends on Redis
+ update_project_statistics
+ increment_monitoring_statistics(artifacts_count)
+
+ success(destroyed_artifacts_count: artifacts_count)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ private
+
+ # This method is implemented in EE and it must do only database work
+ def destroy_related_records(artifacts); end
+
+ def update_project_statistics
+ artifacts_by_project = @job_artifacts.group_by(&:project)
+ artifacts_by_project.each do |project, artifacts|
+ delta = -artifacts.sum { |artifact| artifact.size.to_i }
+ ProjectStatistics.increment_statistic(
+ project, Ci::JobArtifact.project_statistics_name, delta)
+ end
+ end
+
+ def increment_monitoring_statistics(size)
+ metrics.increment_destroyed_artifacts(size)
+ end
+
+ def metrics
+ @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
+ end
+
+ def artifacts_count
+ strong_memoize(:artifacts_count) do
+ @job_artifacts.count
+ end
+ end
+ end
+end
+
+Ci::JobArtifactsDestroyBatchService.prepend_if_ee('EE::Ci::JobArtifactsDestroyBatchService')
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service.rb b/app/services/ci/pipeline_processing/atomic_processing_service.rb
index a23d5d8941a..236d660d829 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service.rb
@@ -53,7 +53,7 @@ module Ci
end
def update_processables!(ids)
- created_processables = pipeline.processables.for_ids(ids)
+ created_processables = pipeline.processables.id_in(ids)
.with_project_preload
.created
.latest
@@ -80,7 +80,7 @@ module Ci
return unless Ci::HasStatus::COMPLETED_STATUSES.include?(status)
# transition status if possible
- Gitlab::OptimisticLocking.retry_lock(processable) do |subject|
+ Gitlab::OptimisticLocking.retry_lock(processable, name: 'atomic_processing_update_processable') do |subject|
Ci::ProcessBuildService.new(project, subject.user)
.execute(subject, status)
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
index aeabbb99468..35818e2cf3d 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -78,7 +78,7 @@ module Ci
def status_for_array(statuses, dag:)
result = Gitlab::Ci::Status::Composite
- .new(statuses, dag: dag)
+ .new(statuses, dag: dag, project: pipeline.project)
.status
result || 'success'
end
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index 678b386fbbf..970652b4da3 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -30,6 +30,8 @@ module Ci
# this updates only when there are data that needs to be updated, there are two groups with no retried flag
# rubocop: disable CodeReuse/ActiveRecord
def update_retried
+ return if Feature.enabled?(:ci_remove_update_retried_from_process_pipeline, pipeline.project, default_enabled: :yaml)
+
# find the latest builds for each name
latest_statuses = pipeline.latest_statuses
.group(:name)
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index 59691fe4ef3..ed9e44d60f1 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -4,21 +4,85 @@ module Ci
# This class responsible for assigning
# proper pending build to runner on runner API request
class RegisterJobService
- attr_reader :runner
+ attr_reader :runner, :metrics
- JOB_QUEUE_DURATION_SECONDS_BUCKETS = [1, 3, 10, 30, 60, 300, 900, 1800, 3600].freeze
- JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET = 5.freeze
- METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'
- DEFAULT_METRICS_SHARD = 'default'
+ TEMPORARY_LOCK_TIMEOUT = 3.seconds
Result = Struct.new(:build, :build_json, :valid?)
+ MAX_QUEUE_DEPTH = 50
+
def initialize(runner)
@runner = runner
+ @metrics = ::Gitlab::Ci::Queue::Metrics.new(runner)
end
- # rubocop: disable CodeReuse/ActiveRecord
def execute(params = {})
+ @metrics.increment_queue_operation(:queue_attempt)
+
+ @metrics.observe_queue_time do
+ process_queue(params)
+ end
+ end
+
+ private
+
+ def process_queue(params)
+ valid = true
+ depth = 0
+
+ each_build(params) do |build|
+ depth += 1
+ @metrics.increment_queue_operation(:queue_iteration)
+
+ if depth > max_queue_depth
+ @metrics.increment_queue_operation(:queue_depth_limit)
+
+ valid = false
+
+ break
+ end
+
+ # We read builds from replicas
+ # It is likely that some other concurrent connection is processing
+ # a given build at a given moment. To avoid an expensive compute
+ # we perform an exclusive lease on Redis to acquire a build temporarily
+ unless acquire_temporary_lock(build.id)
+ @metrics.increment_queue_operation(:build_temporary_locked)
+
+ # We failed to acquire lock
+ # - our queue is not complete as some resources are locked temporarily
+ # - we need to re-process it again to ensure that all builds are handled
+ valid = false
+
+ next
+ end
+
+ result = process_build(build, params)
+ next unless result
+
+ if result.valid?
+ @metrics.register_success(result.build)
+ @metrics.observe_queue_depth(:found, depth)
+
+ return result # rubocop:disable Cop/AvoidReturnFromBlocks
+ else
+ # The usage of valid: is described in
+ # handling of ActiveRecord::StaleObjectError
+ valid = false
+ end
+ end
+
+ @metrics.increment_queue_operation(:queue_conflict) unless valid
+ @metrics.observe_queue_depth(:conflict, depth) unless valid
+ @metrics.observe_queue_depth(:not_found, depth) if valid
+ @metrics.register_failure
+
+ Result.new(nil, nil, valid)
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def each_build(params, &blk)
builds =
if runner.instance_type?
builds_for_shared_runner
@@ -28,8 +92,6 @@ module Ci
builds_for_project_runner
end
- valid = true
-
# pick builds that does not have other tags than runner's one
builds = builds.matches_tag_ids(runner.tags.ids)
@@ -43,37 +105,42 @@ module Ci
builds = builds.queued_before(params[:job_age].seconds.ago)
end
- builds.each do |build|
- result = process_build(build, params)
- next unless result
+ if Feature.enabled?(:ci_register_job_service_one_by_one, runner)
+ build_ids = builds.pluck(:id)
- if result.valid?
- register_success(result.build)
+ @metrics.observe_queue_size(-> { build_ids.size })
- return result
- else
- # The usage of valid: is described in
- # handling of ActiveRecord::StaleObjectError
- valid = false
+ build_ids.each do |build_id|
+ yield Ci::Build.find(build_id)
end
- end
+ else
+ @metrics.observe_queue_size(-> { builds.to_a.size })
- register_failure
- Result.new(nil, nil, valid)
+ builds.each(&blk)
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
- private
-
def process_build(build, params)
- return unless runner.can_pick?(build)
+ unless build.pending?
+ @metrics.increment_queue_operation(:build_not_pending)
+ return
+ end
+
+ if runner.can_pick?(build)
+ @metrics.increment_queue_operation(:build_can_pick)
+ else
+ @metrics.increment_queue_operation(:build_not_pick)
+
+ return
+ end
# In case when 2 runners try to assign the same build, second runner will be declined
# with StateMachines::InvalidTransition or StaleObjectError when doing run! or save method.
if assign_runner!(build, params)
present_build!(build)
end
- rescue StateMachines::InvalidTransition, ActiveRecord::StaleObjectError
+ rescue ActiveRecord::StaleObjectError
# We are looping to find another build that is not conflicting
# It also indicates that this build can be picked and passed to runner.
# If we don't do it, basically a bunch of runners would be competing for a build
@@ -83,8 +150,16 @@ module Ci
# In case we hit the concurrency-access lock,
# we still have to return 409 in the end,
# to make sure that this is properly handled by runner.
+ @metrics.increment_queue_operation(:build_conflict_lock)
+
+ Result.new(nil, nil, false)
+ rescue StateMachines::InvalidTransition
+ @metrics.increment_queue_operation(:build_conflict_transition)
+
Result.new(nil, nil, false)
rescue => ex
+ @metrics.increment_queue_operation(:build_conflict_exception)
+
# If an error (e.g. GRPC::DeadlineExceeded) occurred constructing
# the result, consider this as a failure to be retried.
scheduler_failure!(build)
@@ -94,6 +169,16 @@ module Ci
nil
end
+ def max_queue_depth
+ @max_queue_depth ||= begin
+ if Feature.enabled?(:gitlab_ci_builds_queue_limit, runner, default_enabled: false)
+ MAX_QUEUE_DEPTH
+ else
+ ::Gitlab::Database::MAX_INT_VALUE
+ end
+ end
+ end
+
# Force variables evaluation to occur now
def present_build!(build)
# We need to use the presenter here because Gitaly calls in the presenter
@@ -110,16 +195,30 @@ module Ci
failure_reason, _ = pre_assign_runner_checks.find { |_, check| check.call(build, params) }
if failure_reason
+ @metrics.increment_queue_operation(:runner_pre_assign_checks_failed)
+
build.drop!(failure_reason)
else
+ @metrics.increment_queue_operation(:runner_pre_assign_checks_success)
+
build.run!
end
!failure_reason
end
+ def acquire_temporary_lock(build_id)
+ return true unless Feature.enabled?(:ci_register_job_temporary_lock, runner)
+
+ key = "build/register/#{build_id}"
+
+ Gitlab::ExclusiveLease
+ .new(key, timeout: TEMPORARY_LOCK_TIMEOUT.to_i)
+ .try_obtain
+ end
+
def scheduler_failure!(build)
- Gitlab::OptimisticLocking.retry_lock(build, 3) do |subject|
+ Gitlab::OptimisticLocking.retry_lock(build, 3, name: 'register_job_scheduler_failure') do |subject|
subject.drop!(:scheduler_failure)
end
rescue => ex
@@ -189,48 +288,6 @@ module Ci
builds
end
- def register_failure
- failed_attempt_counter.increment
- attempt_counter.increment
- end
-
- def register_success(job)
- labels = { shared_runner: runner.instance_type?,
- jobs_running_for_project: jobs_running_for_project(job),
- shard: DEFAULT_METRICS_SHARD }
-
- if runner.instance_type?
- shard = runner.tag_list.sort.find { |name| name.starts_with?(METRICS_SHARD_TAG_PREFIX) }
- labels[:shard] = shard.gsub(METRICS_SHARD_TAG_PREFIX, '') if shard
- end
-
- job_queue_duration_seconds.observe(labels, Time.current - job.queued_at) unless job.queued_at.nil?
- attempt_counter.increment
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def jobs_running_for_project(job)
- return '+Inf' unless runner.instance_type?
-
- # excluding currently started job
- running_jobs_count = job.project.builds.running.where(runner: Ci::Runner.instance_type)
- .limit(JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET + 1).count - 1
- running_jobs_count < JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET ? running_jobs_count : "#{JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET}+"
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def failed_attempt_counter
- @failed_attempt_counter ||= Gitlab::Metrics.counter(:job_register_attempts_failed_total, "Counts the times a runner tries to register a job")
- end
-
- def attempt_counter
- @attempt_counter ||= Gitlab::Metrics.counter(:job_register_attempts_total, "Counts the times a runner tries to register a job")
- end
-
- def job_queue_duration_seconds
- @job_queue_duration_seconds ||= Gitlab::Metrics.histogram(:job_queue_duration_seconds, 'Request handling execution time', {}, JOB_QUEUE_DURATION_SECONDS_BUCKETS)
- end
-
def pre_assign_runner_checks
{
missing_dependency_failure: -> (build, _) { !build.has_valid_build_dependencies? },
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index e5e79f70616..b2c5249a0c7 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -19,7 +19,7 @@ module Ci
mark_subsequent_stages_as_processable(build)
build.pipeline.reset_ancestor_bridges!
- Gitlab::OptimisticLocking.retry_lock(new_build, &:enqueue)
+ Gitlab::OptimisticLocking.retry_lock(new_build, name: 'retry_build', &:enqueue)
MergeRequests::AddTodoWhenBuildFailsService
.new(project, current_user)
@@ -68,7 +68,7 @@ module Ci
def mark_subsequent_stages_as_processable(build)
build.pipeline.processables.skipped.after_stage(build.stage_idx).find_each do |skipped|
- retry_optimistic_lock(skipped) { |build| build.process(current_user) }
+ retry_optimistic_lock(skipped, name: 'ci_retry_build_mark_subsequent_stages') { |build| build.process(current_user) }
end
end
end
diff --git a/app/services/ci/retry_pipeline_service.rb b/app/services/ci/retry_pipeline_service.rb
index dea4bf73a4c..90ee7b9b3ba 100644
--- a/app/services/ci/retry_pipeline_service.rb
+++ b/app/services/ci/retry_pipeline_service.rb
@@ -23,7 +23,7 @@ module Ci
end
pipeline.builds.latest.skipped.find_each do |skipped|
- retry_optimistic_lock(skipped) { |build| build.process(current_user) }
+ retry_optimistic_lock(skipped, name: 'ci_retry_pipeline') { |build| build.process(current_user) }
end
pipeline.reset_ancestor_bridges!
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index 241eba733ea..cf629b879b3 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -2,16 +2,21 @@
module Ci
class UpdateBuildQueueService
- def execute(build)
- tick_for(build, build.project.all_runners)
+ def execute(build, metrics = ::Gitlab::Ci::Queue::Metrics)
+ tick_for(build, build.project.all_runners, metrics)
end
private
- def tick_for(build, runners)
+ def tick_for(build, runners, metrics)
runners = runners.with_recent_runner_queue
+ runners = runners.with_tags if Feature.enabled?(:ci_preload_runner_tags, default_enabled: :yaml)
+
+ metrics.observe_active_runners(-> { runners.to_a.size })
runners.each do |runner|
+ metrics.increment_runner_tick(runner)
+
runner.pick_build!(build)
end
end
diff --git a/app/services/clusters/kubernetes.rb b/app/services/clusters/kubernetes.rb
index 819ac4c8464..ef549b56946 100644
--- a/app/services/clusters/kubernetes.rb
+++ b/app/services/clusters/kubernetes.rb
@@ -14,5 +14,7 @@ module Clusters
GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME = 'gitlab-crossplane-database-rolebinding'
KNATIVE_SERVING_NAMESPACE = 'knative-serving'
ISTIO_SYSTEM_NAMESPACE = 'istio-system'
+ GITLAB_CILIUM_ROLE_NAME = 'gitlab-cilium-role'
+ GITLAB_CILIUM_ROLE_BINDING_NAME = 'gitlab-cilium-rolebinding'
end
end
diff --git a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
index eabc428d0d2..ecad33fc7c0 100644
--- a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
+++ b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
@@ -53,6 +53,8 @@ module Clusters
create_or_update_knative_serving_role_binding
create_or_update_crossplane_database_role
create_or_update_crossplane_database_role_binding
+ create_or_update_cilium_role
+ create_or_update_cilium_role_binding
end
private
@@ -97,6 +99,14 @@ module Clusters
kubeclient.update_role_binding(crossplane_database_role_binding_resource)
end
+ def create_or_update_cilium_role
+ kubeclient.update_role(cilium_role_resource)
+ end
+
+ def create_or_update_cilium_role_binding
+ kubeclient.update_role_binding(cilium_role_binding_resource)
+ end
+
def service_account_resource
Gitlab::Kubernetes::ServiceAccount.new(
service_account_name,
@@ -175,6 +185,28 @@ module Clusters
service_account_name: service_account_name
).generate
end
+
+ def cilium_role_resource
+ Gitlab::Kubernetes::Role.new(
+ name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME,
+ namespace: service_account_namespace,
+ rules: [{
+ apiGroups: %w(cilium.io),
+ resources: %w(ciliumnetworkpolicies),
+ verbs: %w(get list create update patch)
+ }]
+ ).generate
+ end
+
+ def cilium_role_binding_resource
+ Gitlab::Kubernetes::RoleBinding.new(
+ name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME,
+ role_name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME,
+ role_kind: :Role,
+ namespace: service_account_namespace,
+ service_account_name: service_account_name
+ ).generate
+ end
end
end
end
diff --git a/app/services/concerns/alert_management/alert_processing.rb b/app/services/concerns/alert_management/alert_processing.rb
index 9b15c5d7b4b..7b6f681fe3e 100644
--- a/app/services/concerns/alert_management/alert_processing.rb
+++ b/app/services/concerns/alert_management/alert_processing.rb
@@ -41,14 +41,21 @@ module AlertManagement
end
def process_resolved_alert
+ SystemNoteService.log_resolving_alert(alert, alert_source)
+
return unless auto_close_incident?
- return close_issue(alert.issue) if alert.resolve(incoming_payload.ends_at)
- logger.warn(
- message: 'Unable to update AlertManagement::Alert status to resolved',
- project_id: project.id,
- alert_id: alert.id
- )
+ if alert.resolve(incoming_payload.ends_at)
+ SystemNoteService.change_alert_status(alert, User.alert_bot)
+
+ close_issue(alert.issue)
+ else
+ logger.warn(
+ message: 'Unable to update AlertManagement::Alert status to resolved',
+ project_id: project.id,
+ alert_id: alert.id
+ )
+ end
end
def process_firing_alert
diff --git a/app/services/dependency_proxy/find_or_create_manifest_service.rb b/app/services/dependency_proxy/find_or_create_manifest_service.rb
index 6b46f5e4c59..ee608d715aa 100644
--- a/app/services/dependency_proxy/find_or_create_manifest_service.rb
+++ b/app/services/dependency_proxy/find_or_create_manifest_service.rb
@@ -13,7 +13,7 @@ module DependencyProxy
def execute
@manifest = @group.dependency_proxy_manifests
- .find_or_initialize_by_file_name(@file_name)
+ .find_or_initialize_by_file_name_or_digest(file_name: @file_name, digest: @tag)
head_result = DependencyProxy::HeadManifestService.new(@image, @tag, @token).execute
@@ -30,6 +30,7 @@ module DependencyProxy
def pull_new_manifest
DependencyProxy::PullManifestService.new(@image, @tag, @token).execute_with_manifest do |new_manifest|
@manifest.update!(
+ content_type: new_manifest[:content_type],
digest: new_manifest[:digest],
file: new_manifest[:file],
size: new_manifest[:file].size
@@ -38,7 +39,9 @@ module DependencyProxy
end
def cached_manifest_matches?(head_result)
- @manifest && @manifest.digest == head_result[:digest]
+ return false if head_result[:status] == :error
+
+ @manifest && @manifest.digest == head_result[:digest] && @manifest.content_type == head_result[:content_type]
end
def respond
diff --git a/app/services/dependency_proxy/head_manifest_service.rb b/app/services/dependency_proxy/head_manifest_service.rb
index 87d9c417c98..ecc3eb77399 100644
--- a/app/services/dependency_proxy/head_manifest_service.rb
+++ b/app/services/dependency_proxy/head_manifest_service.rb
@@ -2,6 +2,8 @@
module DependencyProxy
class HeadManifestService < DependencyProxy::BaseService
+ ACCEPT_HEADERS = ::ContainerRegistry::Client::ACCEPTED_TYPES.join(',')
+
def initialize(image, tag, token)
@image = image
@tag = tag
@@ -9,10 +11,10 @@ module DependencyProxy
end
def execute
- response = Gitlab::HTTP.head(manifest_url, headers: auth_headers)
+ response = Gitlab::HTTP.head(manifest_url, headers: auth_headers.merge(Accept: ACCEPT_HEADERS))
if response.success?
- success(digest: response.headers['docker-content-digest'])
+ success(digest: response.headers['docker-content-digest'], content_type: response.headers['content-type'])
else
error(response.body, response.code)
end
diff --git a/app/services/dependency_proxy/pull_manifest_service.rb b/app/services/dependency_proxy/pull_manifest_service.rb
index 5c804489fd1..737414c396e 100644
--- a/app/services/dependency_proxy/pull_manifest_service.rb
+++ b/app/services/dependency_proxy/pull_manifest_service.rb
@@ -11,7 +11,7 @@ module DependencyProxy
def execute_with_manifest
raise ArgumentError, 'Block must be provided' unless block_given?
- response = Gitlab::HTTP.get(manifest_url, headers: auth_headers)
+ response = Gitlab::HTTP.get(manifest_url, headers: auth_headers.merge(Accept: ::ContainerRegistry::Client::ACCEPTED_TYPES.join(',')))
if response.success?
file = Tempfile.new
@@ -20,7 +20,7 @@ module DependencyProxy
file.write(response)
file.flush
- yield(success(file: file, digest: response.headers['docker-content-digest']))
+ yield(success(file: file, digest: response.headers['docker-content-digest'], content_type: response.headers['content-type']))
ensure
file.close
file.unlink
diff --git a/app/services/deployments/older_deployments_drop_service.rb b/app/services/deployments/older_deployments_drop_service.rb
index e765d2484ea..9283a5c1279 100644
--- a/app/services/deployments/older_deployments_drop_service.rb
+++ b/app/services/deployments/older_deployments_drop_service.rb
@@ -12,7 +12,7 @@ module Deployments
return unless @deployment&.running?
older_deployments.find_each do |older_deployment|
- Gitlab::OptimisticLocking.retry_lock(older_deployment.deployable) do |deployable|
+ Gitlab::OptimisticLocking.retry_lock(older_deployment.deployable, name: 'older_deployments_drop') do |deployable|
deployable.drop(:forward_deployment_failure)
end
rescue => e
diff --git a/app/services/deployments/update_environment_service.rb b/app/services/deployments/update_environment_service.rb
index e9c2f41f626..98fedb9f699 100644
--- a/app/services/deployments/update_environment_service.rb
+++ b/app/services/deployments/update_environment_service.rb
@@ -25,11 +25,10 @@ module Deployments
def update_environment(deployment)
ActiveRecord::Base.transaction do
- if (url = expanded_environment_url)
- environment.external_url = url
- end
-
+ # Renew attributes at update
+ renew_external_url
renew_auto_stop_in
+ renew_deployment_tier
environment.fire_state_event(action)
if environment.save && !environment.stopped?
@@ -56,11 +55,25 @@ module Deployments
environment_options[:action] || 'start'
end
+ def renew_external_url
+ if (url = expanded_environment_url)
+ environment.external_url = url
+ end
+ end
+
def renew_auto_stop_in
return unless deployable
environment.auto_stop_in = deployable.environment_auto_stop_in
end
+
+ def renew_deployment_tier
+ return unless deployable
+
+ if (tier = deployable.environment_deployment_tier)
+ environment.tier = tier
+ end
+ end
end
end
diff --git a/app/services/environments/schedule_to_delete_review_apps_service.rb b/app/services/environments/schedule_to_delete_review_apps_service.rb
new file mode 100644
index 00000000000..b3b86689748
--- /dev/null
+++ b/app/services/environments/schedule_to_delete_review_apps_service.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+module Environments
+ class ScheduleToDeleteReviewAppsService < ::BaseService
+ include ::Gitlab::ExclusiveLeaseHelpers
+
+ EXCLUSIVE_LOCK_KEY_BASE = 'environments:delete_review_apps:lock'
+ LOCK_TIMEOUT = 2.minutes
+
+ def execute
+ if validation_error = validate
+ return validation_error
+ end
+
+ mark_deletable_environments
+ end
+
+ private
+
+ def key
+ "#{EXCLUSIVE_LOCK_KEY_BASE}:#{project.id}"
+ end
+
+ def dry_run?
+ return true if params[:dry_run].nil?
+
+ params[:dry_run]
+ end
+
+ def validate
+ return if can?(current_user, :destroy_environment, project)
+
+ Result.new(error_message: "You do not have permission to destroy environments in this project", status: :unauthorized)
+ end
+
+ def mark_deletable_environments
+ in_lock(key, ttl: LOCK_TIMEOUT, retries: 1) do
+ unsafe_mark_deletable_environments
+ end
+
+ rescue FailedToObtainLockError
+ Result.new(error_message: "Another process is already processing a delete request. Please retry later.", status: :conflict)
+ end
+
+ def unsafe_mark_deletable_environments
+ result = Result.new
+ environments = project.environments
+ .not_scheduled_for_deletion
+ .stopped_review_apps(params[:before], params[:limit])
+
+ # Check if the actor has write permission to a potentially-protected environment.
+ deletable, failed = *environments.partition { |env| current_user.can?(:destroy_environment, env) }
+
+ if deletable.any? && failed.empty?
+ mark_for_deletion(deletable) unless dry_run?
+ result.set_status(:ok)
+ result.set_scheduled_entries(deletable)
+ else
+ result.set_status(
+ :bad_request,
+ error_message: "Failed to authorize deletions for some or all of the environments. Ask someone with more permissions to delete the environments."
+ )
+
+ result.set_unprocessable_entries(failed)
+ end
+
+ result
+ end
+
+ def mark_for_deletion(deletable_environments)
+ Environment.for_id(deletable_environments).schedule_to_delete
+ end
+
+ class Result
+ attr_accessor :scheduled_entries, :unprocessable_entries, :error_message, :status
+
+ def initialize(scheduled_entries: [], unprocessable_entries: [], error_message: nil, status: nil)
+ self.scheduled_entries = scheduled_entries
+ self.unprocessable_entries = unprocessable_entries
+ self.error_message = error_message
+ self.status = status
+ end
+
+ def success?
+ status == :ok
+ end
+
+ def set_status(status, error_message: nil)
+ self.status = status
+ self.error_message = error_message
+ end
+
+ def set_scheduled_entries(entries)
+ self.scheduled_entries = entries
+ end
+
+ def set_unprocessable_entries(entries)
+ self.unprocessable_entries = entries
+ end
+ end
+ end
+end
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index 06a3b31c665..3ead2323588 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -33,7 +33,7 @@ module Groups
Group.transaction do
if @group.save
@group.add_owner(current_user)
- @group.create_namespace_settings
+ @group.create_namespace_settings unless @group.namespace_settings
Service.create_from_active_default_integrations(@group, :group_id)
OnboardingProgress.onboard(@group)
end
diff --git a/app/services/groups/destroy_service.rb b/app/services/groups/destroy_service.rb
index c7107e2fa56..a27330d1104 100644
--- a/app/services/groups/destroy_service.rb
+++ b/app/services/groups/destroy_service.rb
@@ -31,11 +31,11 @@ module Groups
# If any other groups are shared with the group that is being destroyed,
# we should specifically trigger update of all project authorizations
- # for users that are the members of this group.
+ # for users that are the direct members of this group.
# If not, the project authorization records of these users to projects within the shared groups
# will never be removed, causing inconsistencies with access permissions.
if any_other_groups_are_shared_with_this_group?
- user_ids_for_project_authorizations_refresh = group.user_ids_for_project_authorizations
+ user_ids_for_project_authorizations_refresh = group.users_ids_of_direct_members
end
group.destroy
diff --git a/app/services/groups/group_links/create_service.rb b/app/services/groups/group_links/create_service.rb
index 589ac7ccde7..57c746c3841 100644
--- a/app/services/groups/group_links/create_service.rb
+++ b/app/services/groups/group_links/create_service.rb
@@ -18,7 +18,7 @@ module Groups
)
if link.save
- group.refresh_members_authorized_projects
+ group.refresh_members_authorized_projects(direct_members_only: true)
success(link: link)
else
error(link.errors.full_messages.to_sentence, 409)
diff --git a/app/services/groups/group_links/destroy_service.rb b/app/services/groups/group_links/destroy_service.rb
index b0d496ae78c..05504a80f46 100644
--- a/app/services/groups/group_links/destroy_service.rb
+++ b/app/services/groups/group_links/destroy_service.rb
@@ -16,7 +16,7 @@ module Groups
groups_to_refresh = links.map(&:shared_with_group)
groups_to_refresh.uniq.each do |group|
- group.refresh_members_authorized_projects
+ group.refresh_members_authorized_projects(direct_members_only: true)
end
else
Gitlab::AppLogger.info(
diff --git a/app/services/groups/group_links/update_service.rb b/app/services/groups/group_links/update_service.rb
index 71b52cb616c..3703d535482 100644
--- a/app/services/groups/group_links/update_service.rb
+++ b/app/services/groups/group_links/update_service.rb
@@ -13,7 +13,7 @@ module Groups
group_link.update!(group_link_params)
if requires_authorization_refresh?(group_link_params)
- group_link.shared_with_group.refresh_members_authorized_projects
+ group_link.shared_with_group.refresh_members_authorized_projects(direct_members_only: true)
end
end
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index 847c5eb4397..3ee5a185f42 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -2,6 +2,9 @@
module Import
class GithubService < Import::BaseService
+ include ActiveSupport::NumberHelper
+ include Gitlab::Utils::StrongMemoize
+
attr_accessor :client
attr_reader :params, :current_user
@@ -14,6 +17,10 @@ module Import
return error(_('This namespace has already been taken! Please choose another one.'), :unprocessable_entity)
end
+ if oversized?
+ return error(oversize_error_message, :unprocessable_entity)
+ end
+
project = create_project(access_params, provider)
if project.persisted?
@@ -32,7 +39,8 @@ module Import
target_namespace,
current_user,
type: provider,
- **access_params).execute(extra_project_attrs)
+ **access_params
+ ).execute(extra_project_attrs)
end
def repo
@@ -55,6 +63,30 @@ module Import
{}
end
+ def oversized?
+ repository_size_limit > 0 && repo.size > repository_size_limit
+ end
+
+ def oversize_error_message
+ _('"%{repository_name}" size (%{repository_size}) is larger than the limit of %{limit}.') % {
+ repository_name: repo.name,
+ repository_size: number_to_human_size(repo.size),
+ limit: number_to_human_size(repository_size_limit)
+ }
+ end
+
+ def repository_size_limit
+ strong_memoize :repository_size_limit do
+ namespace_limit = target_namespace.repository_size_limit.to_i
+
+ if namespace_limit > 0
+ namespace_limit
+ else
+ Gitlab::CurrentSettings.repository_size_limit.to_i
+ end
+ end
+ end
+
def authorized?
can?(current_user, :create_projects, target_namespace)
end
diff --git a/app/services/issuable/clone/base_service.rb b/app/services/issuable/clone/base_service.rb
index b2f9c083b5b..3c2bc527b12 100644
--- a/app/services/issuable/clone/base_service.rb
+++ b/app/services/issuable/clone/base_service.rb
@@ -3,12 +3,13 @@
module Issuable
module Clone
class BaseService < IssuableBaseService
- attr_reader :original_entity, :new_entity
+ attr_reader :original_entity, :new_entity, :target_project
alias_method :old_project, :project
- def execute(original_entity, new_project = nil)
+ def execute(original_entity, target_project = nil)
@original_entity = original_entity
+ @target_project = target_project
# Using transaction because of a high resources footprint
# on rewriting notes (unfolding references)
@@ -77,6 +78,12 @@ module Issuable
new_entity.project.group
end
end
+
+ def relative_position
+ return if original_entity.project.root_ancestor.id != target_project.root_ancestor.id
+
+ original_entity.relative_position
+ end
end
end
end
diff --git a/app/services/issuable/process_assignees.rb b/app/services/issuable/process_assignees.rb
new file mode 100644
index 00000000000..c9c6b0bed85
--- /dev/null
+++ b/app/services/issuable/process_assignees.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+# This follows the rules specified in the specs.
+# See spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
+
+module Issuable
+ class ProcessAssignees
+ def initialize(assignee_ids:, add_assignee_ids:, remove_assignee_ids:, existing_assignee_ids: nil, extra_assignee_ids: nil)
+ @assignee_ids = assignee_ids
+ @add_assignee_ids = add_assignee_ids
+ @remove_assignee_ids = remove_assignee_ids
+ @existing_assignee_ids = existing_assignee_ids || []
+ @extra_assignee_ids = extra_assignee_ids || []
+ end
+
+ def execute
+ if assignee_ids.blank?
+ updated_new_assignees = new_assignee_ids
+ updated_new_assignees |= add_assignee_ids if add_assignee_ids
+ updated_new_assignees -= remove_assignee_ids if remove_assignee_ids
+ else
+ updated_new_assignees = assignee_ids
+ end
+
+ updated_new_assignees.uniq
+ end
+
+ private
+
+ attr_accessor :assignee_ids, :add_assignee_ids, :remove_assignee_ids, :existing_assignee_ids, :extra_assignee_ids
+
+ def new_assignee_ids
+ existing_assignee_ids | extra_assignee_ids
+ end
+ end
+end
diff --git a/app/services/issue_rebalancing_service.rb b/app/services/issue_rebalancing_service.rb
index 849afc4edb8..db5c5ddfb84 100644
--- a/app/services/issue_rebalancing_service.rb
+++ b/app/services/issue_rebalancing_service.rb
@@ -2,6 +2,7 @@
class IssueRebalancingService
MAX_ISSUE_COUNT = 10_000
+ BATCH_SIZE = 100
TooManyIssues = Class.new(StandardError)
def initialize(issue)
@@ -21,13 +22,13 @@ class IssueRebalancingService
Issue.transaction do
assign_positions(start, indexed_ids)
.sort_by(&:first)
- .each_slice(100) do |pairs_with_position|
+ .each_slice(BATCH_SIZE) do |pairs_with_position|
update_positions(pairs_with_position, 'rebalance issue positions in batches ordered by id')
end
end
else
Issue.transaction do
- indexed_ids.each_slice(100) do |pairs|
+ indexed_ids.each_slice(BATCH_SIZE) do |pairs|
pairs_with_position = assign_positions(start, pairs)
update_positions(pairs_with_position, 'rebalance issue positions')
end
diff --git a/app/services/issues/clone_service.rb b/app/services/issues/clone_service.rb
index 4c9c34f1247..b64e4687a87 100644
--- a/app/services/issues/clone_service.rb
+++ b/app/services/issues/clone_service.rb
@@ -47,6 +47,7 @@ module Issues
new_params = {
id: nil,
iid: nil,
+ relative_position: relative_position,
project: target_project,
author: current_user,
assignee_ids: original_entity.assignee_ids
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index d2285a375a1..3fdc66ed84e 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -28,6 +28,7 @@ module Issues
issue.run_after_commit do
NewIssueWorker.perform_async(issue.id, user.id)
IssuePlacementWorker.perform_async(nil, issue.project_id)
+ Namespaces::OnboardingIssueCreatedWorker.perform_async(issue.namespace.id)
end
end
diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb
index 90ccbd8ed21..c1afb8f456d 100644
--- a/app/services/issues/move_service.rb
+++ b/app/services/issues/move_service.rb
@@ -48,13 +48,14 @@ module Issues
def create_new_entity
new_params = {
- id: nil,
- iid: nil,
- project: target_project,
- author: original_entity.author,
- assignee_ids: original_entity.assignee_ids,
- moved_issue: true
- }
+ id: nil,
+ iid: nil,
+ relative_position: relative_position,
+ project: target_project,
+ author: original_entity.author,
+ assignee_ids: original_entity.assignee_ids,
+ moved_issue: true
+ }
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
diff --git a/app/services/jira_import/users_importer.rb b/app/services/jira_import/users_importer.rb
index 438a74343a5..3de165c1014 100644
--- a/app/services/jira_import/users_importer.rb
+++ b/app/services/jira_import/users_importer.rb
@@ -13,7 +13,7 @@ module JiraImport
ServiceResponse.success(payload: mapped_users)
rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED, URI::InvalidURIError, JIRA::HTTPError, OpenSSL::SSL::SSLError => error
- Gitlab::ErrorTracking.track_exception(error, project_id: project.id)
+ Gitlab::ErrorTracking.log_exception(error, project_id: project.id)
ServiceResponse.error(message: "There was an error when communicating to Jira")
rescue Projects::ImportService::Error => error
ServiceResponse.error(message: error.message)
diff --git a/app/services/members/invite_service.rb b/app/services/members/invite_service.rb
index 60ebbaface2..169500d08f0 100644
--- a/app/services/members/invite_service.rb
+++ b/app/services/members/invite_service.rb
@@ -2,112 +2,97 @@
module Members
class InviteService < Members::BaseService
- DEFAULT_LIMIT = 100
+ BlankEmailsError = Class.new(StandardError)
+ TooManyEmailsError = Class.new(StandardError)
- attr_reader :errors
+ def initialize(*args)
+ super
- def initialize(current_user, params)
- @current_user, @params = current_user, params.dup
@errors = {}
+ @emails = params[:email]&.split(',')&.uniq&.flatten
end
def execute(source)
- return error(s_('Email cannot be blank')) if params[:email].blank?
+ validate_emails!
- emails = params[:email].split(',').uniq.flatten
- return error(s_("Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }) if
- user_limit && emails.size > user_limit
-
- emails.each do |email|
- next if existing_member?(source, email)
- next if existing_invite?(source, email)
- next if existing_request?(source, email)
-
- if existing_user?(email)
- add_existing_user_as_member(current_user, source, params, email)
- next
- end
-
- invite_new_member_and_user(current_user, source, params, email)
- end
-
- return success unless errors.any?
-
- error(errors)
+ @source = source
+ emails.each(&method(:process_email))
+ result
+ rescue BlankEmailsError, TooManyEmailsError => e
+ error(e.message)
end
private
- def invite_new_member_and_user(current_user, source, params, email)
- new_member = (source.class.name + 'Member').constantize.create(source_id: source.id,
- user_id: nil,
- access_level: params[:access_level],
- invite_email: email,
- created_by_id: current_user.id,
- expires_at: params[:expires_at])
-
- unless new_member.valid? && new_member.persisted?
- errors[params[:email]] = new_member.errors.full_messages.to_sentence
- end
- end
+ attr_reader :source, :errors, :emails
- def add_existing_user_as_member(current_user, source, params, email)
- new_member = create_member(current_user, existing_user(email), source, params.merge({ invite_email: email }))
+ def validate_emails!
+ raise BlankEmailsError, s_('AddMember|Email cannot be blank') if emails.blank?
- unless new_member.valid? && new_member.persisted?
- errors[email] = new_member.errors.full_messages.to_sentence
+ if user_limit && emails.size > user_limit
+ raise TooManyEmailsError, s_("AddMember|Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }
end
end
- def create_member(current_user, user, source, params)
- source.add_user(user, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
+ def user_limit
+ limit = params.fetch(:limit, Members::CreateService::DEFAULT_LIMIT)
+
+ limit < 0 ? nil : limit
end
- def user_limit
- limit = params.fetch(:limit, DEFAULT_LIMIT)
+ def process_email(email)
+ return if existing_member?(email)
+ return if existing_invite?(email)
+ return if existing_request?(email)
- limit && limit < 0 ? nil : limit
+ add_member(email)
end
- def existing_member?(source, email)
+ def existing_member?(email)
existing_member = source.members.with_user_by_email(email).exists?
if existing_member
- errors[email] = "Already a member of #{source.name}"
+ errors[email] = s_("AddMember|Already a member of %{source_name}") % { source_name: source.name }
return true
end
false
end
- def existing_invite?(source, email)
+ def existing_invite?(email)
existing_invite = source.members.search_invite_email(email).exists?
if existing_invite
- errors[email] = "Member already invited to #{source.name}"
+ errors[email] = s_("AddMember|Member already invited to %{source_name}") % { source_name: source.name }
return true
end
false
end
- def existing_request?(source, email)
+ def existing_request?(email)
existing_request = source.requesters.with_user_by_email(email).exists?
if existing_request
- errors[email] = "Member cannot be invited because they already requested to join #{source.name}"
+ errors[email] = s_("AddMember|Member cannot be invited because they already requested to join %{source_name}") % { source_name: source.name }
return true
end
false
end
- def existing_user(email)
- User.find_by_email(email)
+ def add_member(email)
+ new_member = source.add_user(email, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
+
+ errors[email] = new_member.errors.full_messages.to_sentence if new_member.invalid?
end
- def existing_user?(email)
- existing_user(email).present?
+ def result
+ if errors.any?
+ error(errors)
+ else
+ success
+ end
end
end
end
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index 03fcb5a4c1b..b22afe8a20d 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -3,8 +3,18 @@
module MergeRequests
class AfterCreateService < MergeRequests::BaseService
def execute(merge_request)
+ prepare_merge_request(merge_request)
+ merge_request.mark_as_unchecked if merge_request.preparing?
+ end
+
+ private
+
+ def prepare_merge_request(merge_request)
event_service.open_mr(merge_request, current_user)
+
merge_request_activity_counter.track_create_mr_action(user: current_user)
+ merge_request_activity_counter.track_mr_including_ci_config(user: current_user, merge_request: merge_request)
+
notification_service.new_merge_request(merge_request, current_user)
create_pipeline_for(merge_request, current_user)
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index 6bd31e26748..317cd11a69d 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -181,7 +181,7 @@ module MergeRequests
}
if exception
- Gitlab::ErrorTracking.with_context(current_user) do
+ Gitlab::ApplicationContext.with_context(user: current_user) do
Gitlab::ErrorTracking.track_exception(exception, data)
end
diff --git a/app/services/merge_requests/build_service.rb b/app/services/merge_requests/build_service.rb
index 12c901aa1a1..e4d3c91d13e 100644
--- a/app/services/merge_requests/build_service.rb
+++ b/app/services/merge_requests/build_service.rb
@@ -58,6 +58,7 @@ module MergeRequests
:compare_commits,
:wip_title,
:description,
+ :first_multiline_commit,
:errors,
to: :merge_request
@@ -196,7 +197,8 @@ module MergeRequests
# interpreted as the user wants to close that issue on this project.
#
# For example:
- # - Issue 112 exists, title: Emoji don't show up in commit title
+ # - Issue 112 exists
+ # - title: Emoji don't show up in commit title
# - Source branch is: 112-fix-mep-mep
#
# Will lead to:
@@ -205,7 +207,7 @@ module MergeRequests
# more than one commit in the MR
#
def assign_title_and_description
- assign_title_and_description_from_single_commit
+ assign_title_and_description_from_commits
merge_request.title ||= title_from_issue if target_project.issues_enabled? || target_project.external_issue_tracker
merge_request.title ||= source_branch.titleize.humanize
merge_request.title = wip_title if compare_commits.empty?
@@ -240,12 +242,16 @@ module MergeRequests
end
end
- def assign_title_and_description_from_single_commit
+ def assign_title_and_description_from_commits
commits = compare_commits
- return unless commits&.count == 1
+ if commits&.count == 1
+ commit = commits.first
+ else
+ commit = first_multiline_commit
+ return unless commit
+ end
- commit = commits.first
merge_request.title ||= commit.title
merge_request.description ||= commit.description.try(:strip)
end
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index fc4405ef704..27f474b0fe7 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -107,8 +107,7 @@ module MergeRequests
log_info("Post merge finished on JID #{merge_jid} with state #{state}")
if delete_source_branch?
- ::Branches::DeleteService.new(@merge_request.source_project, branch_deletion_user)
- .execute(merge_request.source_branch)
+ MergeRequests::DeleteSourceBranchWorker.perform_async(@merge_request.id, @merge_request.source_branch_sha, branch_deletion_user.id)
end
end
diff --git a/app/services/merge_requests/post_merge_service.rb b/app/services/merge_requests/post_merge_service.rb
index aafba9bfcef..4d7d632ee14 100644
--- a/app/services/merge_requests/post_merge_service.rb
+++ b/app/services/merge_requests/post_merge_service.rb
@@ -20,7 +20,6 @@ module MergeRequests
merge_request_activity_counter.track_merge_mr_action(user: current_user)
notification_service.merge_mr(merge_request, current_user)
execute_hooks(merge_request, 'merge')
- retarget_chain_merge_requests(merge_request)
invalidate_cache_counts(merge_request, users: merge_request.assignees | merge_request.reviewers)
merge_request.update_project_counter_caches
delete_non_latest_diffs(merge_request)
@@ -31,34 +30,6 @@ module MergeRequests
private
- def retarget_chain_merge_requests(merge_request)
- return unless Feature.enabled?(:retarget_merge_requests, merge_request.target_project)
-
- # we can only retarget MRs that are targeting the same project
- # and have a remove source branch set
- return unless merge_request.for_same_project? && merge_request.remove_source_branch?
-
- # find another merge requests that
- # - as a target have a current source project and branch
- other_merge_requests = merge_request.source_project
- .merge_requests
- .opened
- .by_target_branch(merge_request.source_branch)
- .preload_source_project
- .at_most(MAX_RETARGET_MERGE_REQUESTS)
-
- other_merge_requests.find_each do |other_merge_request|
- # Update only MRs on projects that we have access to
- next unless can?(current_user, :update_merge_request, other_merge_request.source_project)
-
- ::MergeRequests::UpdateService
- .new(other_merge_request.source_project, current_user,
- target_branch: merge_request.target_branch,
- target_branch_was_deleted: true)
- .execute(other_merge_request)
- end
- end
-
def close_issues(merge_request)
return unless merge_request.target_branch == project.default_branch
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index ed977a5a872..0fb16597aff 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -44,6 +44,7 @@ module MergeRequests
notify_about_push(mr)
mark_mr_as_draft_from_commits(mr)
execute_mr_web_hooks(mr)
+ merge_request_activity_counter.track_mr_including_ci_config(user: mr.author, merge_request: mr)
end
true
@@ -74,7 +75,8 @@ module MergeRequests
def post_merge_manually_merged
commit_ids = @commits.map(&:id)
merge_requests = @project.merge_requests.opened
- .preload(:latest_merge_request_diff)
+ .preload_project_and_latest_diff
+ .preload_latest_diff_commit
.where(target_branch: @push.branch_name).to_a
.select(&:diff_head_commit)
.select do |merge_request|
@@ -116,11 +118,14 @@ module MergeRequests
# Note: we should update merge requests from forks too
def reload_merge_requests
merge_requests = @project.merge_requests.opened
- .by_source_or_target_branch(@push.branch_name).to_a
+ .by_source_or_target_branch(@push.branch_name)
+ .preload_project_and_latest_diff
- merge_requests += merge_requests_for_forks.to_a
+ merge_requests_from_forks = merge_requests_for_forks
+ .preload_project_and_latest_diff
- filter_merge_requests(merge_requests).each do |merge_request|
+ merge_requests_array = merge_requests.to_a + merge_requests_from_forks.to_a
+ filter_merge_requests(merge_requests_array).each do |merge_request|
if branch_and_project_match?(merge_request) || @push.force_push?
merge_request.reload_diff(current_user)
# Clear existing merge error if the push were directed at the
diff --git a/app/services/merge_requests/retarget_chain_service.rb b/app/services/merge_requests/retarget_chain_service.rb
new file mode 100644
index 00000000000..f24d67243c9
--- /dev/null
+++ b/app/services/merge_requests/retarget_chain_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class RetargetChainService < MergeRequests::BaseService
+ MAX_RETARGET_MERGE_REQUESTS = 4
+
+ def execute(merge_request)
+ return unless Feature.enabled?(:retarget_merge_requests, merge_request.target_project, default_enabled: :yaml)
+
+ # we can only retarget MRs that are targeting the same project
+ return unless merge_request.for_same_project? && merge_request.merged?
+
+ # find another merge requests that
+ # - as a target have a current source project and branch
+ other_merge_requests = merge_request.source_project
+ .merge_requests
+ .opened
+ .by_target_branch(merge_request.source_branch)
+ .preload_source_project
+ .at_most(MAX_RETARGET_MERGE_REQUESTS)
+
+ other_merge_requests.find_each do |other_merge_request|
+ # Update only MRs on projects that we have access to
+ next unless can?(current_user, :update_merge_request, other_merge_request.source_project)
+
+ ::MergeRequests::UpdateService
+ .new(other_merge_request.source_project, current_user,
+ target_branch: merge_request.target_branch,
+ target_branch_was_deleted: true)
+ .execute(other_merge_request)
+ end
+ end
+ end
+end
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index 1707daff734..f5e14797f7e 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -31,54 +31,37 @@ module MergeRequests
old_mentioned_users = old_associations.fetch(:mentioned_users, [])
old_assignees = old_associations.fetch(:assignees, [])
old_reviewers = old_associations.fetch(:reviewers, [])
+ old_timelogs = old_associations.fetch(:timelogs, [])
+ changed_fields = merge_request.previous_changes.keys
- if has_changes?(merge_request, old_labels: old_labels, old_assignees: old_assignees, old_reviewers: old_reviewers)
- todo_service.resolve_todos_for_target(merge_request, current_user)
- end
+ resolve_todos(merge_request, old_labels, old_assignees, old_reviewers)
if merge_request.previous_changes.include?('title') ||
merge_request.previous_changes.include?('description')
todo_service.update_merge_request(merge_request, current_user, old_mentioned_users)
end
- if merge_request.previous_changes.include?('target_branch')
- create_branch_change_note(merge_request,
- 'target',
- target_branch_was_deleted ? 'delete' : 'update',
- merge_request.previous_changes['target_branch'].first,
- merge_request.target_branch)
-
- abort_auto_merge(merge_request, 'target branch was changed')
- end
-
+ handle_target_branch_change(merge_request)
handle_assignees_change(merge_request, old_assignees) if merge_request.assignees != old_assignees
-
handle_reviewers_change(merge_request, old_reviewers) if merge_request.reviewers != old_reviewers
-
- if merge_request.previous_changes.include?('target_branch') ||
- merge_request.previous_changes.include?('source_branch')
- merge_request.mark_as_unchecked
- end
-
handle_milestone_change(merge_request)
+ handle_draft_status_change(merge_request, changed_fields)
- added_labels = merge_request.labels - old_labels
- if added_labels.present?
- notification_service.async.relabeled_merge_request(
- merge_request,
- added_labels,
- current_user
- )
- end
+ track_title_and_desc_edits(changed_fields)
+ track_discussion_lock_toggle(merge_request, changed_fields)
+ track_time_estimate_and_spend_edits(merge_request, old_timelogs, changed_fields)
+ track_labels_change(merge_request, old_labels)
- added_mentions = merge_request.mentioned_users(current_user) - old_mentioned_users
+ notify_if_labels_added(merge_request, old_labels)
+ notify_if_mentions_added(merge_request, old_mentioned_users)
- if added_mentions.present?
- notification_service.async.new_mentions_in_merge_request(
- merge_request,
- added_mentions,
- current_user
- )
+ # Since #mark_as_unchecked triggers an update action through the MR's
+ # state machine, we want to push this as far down in the process so we
+ # avoid resetting #ActiveModel::Dirty
+ #
+ if merge_request.previous_changes.include?('target_branch') ||
+ merge_request.previous_changes.include?('source_branch')
+ merge_request.mark_as_unchecked
end
end
@@ -95,56 +78,128 @@ module MergeRequests
MergeRequests::CloseService
end
- def before_update(issuable, skip_spam_check: false)
- return unless issuable.changed?
-
- @issuable_changes = issuable.changes
- end
-
def after_update(issuable)
issuable.cache_merge_request_closes_issues!(current_user)
+ end
- return unless @issuable_changes
+ private
- %w(title description).each do |action|
- next unless @issuable_changes.key?(action)
+ attr_reader :target_branch_was_deleted
+
+ def track_title_and_desc_edits(changed_fields)
+ tracked_fields = %w(title description)
+
+ return unless changed_fields.any? { |field| tracked_fields.include?(field) }
+
+ tracked_fields.each do |action|
+ next unless changed_fields.include?(action)
- # Track edits to title or description
- #
merge_request_activity_counter
.public_send("track_#{action}_edit_action".to_sym, user: current_user) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
- # Track changes to Draft/WIP status
- #
- if action == "title"
- old_title, new_title = @issuable_changes["title"]
- old_title_wip = MergeRequest.work_in_progress?(old_title)
- new_title_wip = MergeRequest.work_in_progress?(new_title)
-
- if !old_title_wip && new_title_wip
- # Marked as Draft/WIP
- #
- merge_request_activity_counter
- .track_marked_as_draft_action(user: current_user)
- elsif old_title_wip && !new_title_wip
- # Unmarked as Draft/WIP
- #
- merge_request_activity_counter
- .track_unmarked_as_draft_action(user: current_user)
- end
- end
+ def track_discussion_lock_toggle(merge_request, changed_fields)
+ return unless changed_fields.include?('discussion_locked')
+
+ if merge_request.discussion_locked
+ merge_request_activity_counter.track_discussion_locked_action(user: current_user)
+ else
+ merge_request_activity_counter.track_discussion_unlocked_action(user: current_user)
end
end
- private
+ def track_time_estimate_and_spend_edits(merge_request, old_timelogs, changed_fields)
+ merge_request_activity_counter.track_time_estimate_changed_action(user: current_user) if changed_fields.include?('time_estimate')
+ merge_request_activity_counter.track_time_spent_changed_action(user: current_user) if old_timelogs != merge_request.timelogs
+ end
- attr_reader :target_branch_was_deleted
+ def track_labels_change(merge_request, old_labels)
+ return if Set.new(merge_request.labels) == Set.new(old_labels)
+
+ merge_request_activity_counter.track_labels_changed_action(user: current_user)
+ end
+
+ def notify_if_labels_added(merge_request, old_labels)
+ added_labels = merge_request.labels - old_labels
+
+ return unless added_labels.present?
+
+ notification_service.async.relabeled_merge_request(
+ merge_request,
+ added_labels,
+ current_user
+ )
+ end
+
+ def notify_if_mentions_added(merge_request, old_mentioned_users)
+ added_mentions = merge_request.mentioned_users(current_user) - old_mentioned_users
+
+ return unless added_mentions.present?
+
+ notification_service.async.new_mentions_in_merge_request(
+ merge_request,
+ added_mentions,
+ current_user
+ )
+ end
+
+ def resolve_todos(merge_request, old_labels, old_assignees, old_reviewers)
+ return unless has_changes?(merge_request, old_labels: old_labels, old_assignees: old_assignees, old_reviewers: old_reviewers)
+
+ todo_service.resolve_todos_for_target(merge_request, current_user)
+ end
+
+ def handle_target_branch_change(merge_request)
+ return unless merge_request.previous_changes.include?('target_branch')
+
+ create_branch_change_note(
+ merge_request,
+ 'target',
+ target_branch_was_deleted ? 'delete' : 'update',
+ merge_request.previous_changes['target_branch'].first,
+ merge_request.target_branch
+ )
+
+ abort_auto_merge(merge_request, 'target branch was changed')
+ end
+
+ def handle_draft_status_change(merge_request, changed_fields)
+ return unless changed_fields.include?("title")
+
+ old_title, new_title = merge_request.previous_changes["title"]
+ old_title_wip = MergeRequest.work_in_progress?(old_title)
+ new_title_wip = MergeRequest.work_in_progress?(new_title)
+
+ if !old_title_wip && new_title_wip
+ # Marked as Draft/WIP
+ #
+ merge_request_activity_counter
+ .track_marked_as_draft_action(user: current_user)
+ elsif old_title_wip && !new_title_wip
+ # Unmarked as Draft/WIP
+ #
+ notify_draft_status_changed(merge_request)
+
+ merge_request_activity_counter
+ .track_unmarked_as_draft_action(user: current_user)
+ end
+ end
+
+ def notify_draft_status_changed(merge_request)
+ notification_service.async.change_in_merge_request_draft_status(
+ merge_request,
+ current_user
+ )
+ end
def handle_milestone_change(merge_request)
return if skip_milestone_email
return unless merge_request.previous_changes.include?('milestone_id')
+ merge_request_activity_counter.track_milestone_changed_action(user: current_user)
+
if merge_request.milestone.nil?
notification_service.async.removed_milestone_merge_request(merge_request, current_user)
else
@@ -159,6 +214,7 @@ module MergeRequests
new_assignees = merge_request.assignees - old_assignees
merge_request_activity_counter.track_users_assigned_to_mr(users: new_assignees)
+ merge_request_activity_counter.track_assignees_changed_action(user: current_user)
end
def handle_reviewers_change(merge_request, old_reviewers)
@@ -170,6 +226,7 @@ module MergeRequests
new_reviewers = merge_request.reviewers - old_reviewers
merge_request_activity_counter.track_users_review_requested(users: new_reviewers)
+ merge_request_activity_counter.track_reviewers_changed_action(user: current_user)
end
def create_branch_change_note(issuable, branch_type, event_type, old_branch, new_branch)
diff --git a/app/services/namespaces/in_product_marketing_emails_service.rb b/app/services/namespaces/in_product_marketing_emails_service.rb
index 45b4619ddbe..f009f5d8538 100644
--- a/app/services/namespaces/in_product_marketing_emails_service.rb
+++ b/app/services/namespaces/in_product_marketing_emails_service.rb
@@ -63,7 +63,10 @@ module Namespaces
.completed_actions_with_latest_in_range(completed_actions, range)
.incomplete_actions(incomplete_action)
- Group.joins(:onboarding_progress).merge(onboarding_progress_scope)
+ # Filtering out sub-groups is a temporary fix to prevent calling
+ # `.root_ancestor` on groups that are not root groups.
+ # See https://gitlab.com/groups/gitlab-org/-/epics/5594 for more information.
+ Group.where(parent_id: nil).joins(:onboarding_progress).merge(onboarding_progress_scope)
end
def users_for_group(group)
diff --git a/app/services/notes/build_service.rb b/app/services/notes/build_service.rb
index cf21818a886..8c250526efc 100644
--- a/app/services/notes/build_service.rb
+++ b/app/services/notes/build_service.rb
@@ -3,32 +3,36 @@
module Notes
class BuildService < ::BaseService
def execute
- should_resolve = false
in_reply_to_discussion_id = params.delete(:in_reply_to_discussion_id)
+ discussion = nil
if in_reply_to_discussion_id.present?
discussion = find_discussion(in_reply_to_discussion_id)
- unless discussion && can?(current_user, :create_note, discussion.noteable)
- note = Note.new
- note.errors.add(:base, _('Discussion to reply to cannot be found'))
- return note
- end
+ return discussion_not_found unless discussion && can?(current_user, :create_note, discussion.noteable)
discussion = discussion.convert_to_discussion! if discussion.can_convert_to_discussion?
params.merge!(discussion.reply_attributes)
- should_resolve = discussion.resolved?
end
+ new_note(params, discussion)
+ end
+
+ private
+
+ def new_note(params, discussion)
note = Note.new(params)
note.project = project
note.author = current_user
- if should_resolve
- note.resolve_without_save(current_user)
- end
+ parent_confidential = discussion&.confidential?
+ can_set_confidential = can?(current_user, :mark_note_as_confidential, note)
+ return discussion_not_found if parent_confidential && !can_set_confidential
+
+ note.confidential = (parent_confidential.nil? && can_set_confidential ? params.delete(:confidential) : parent_confidential)
+ note.resolve_without_save(current_user) if discussion&.resolved?
note
end
@@ -39,5 +43,11 @@ module Notes
Note.find_discussion(discussion_id)
end
end
+
+ def discussion_not_found
+ note = Note.new
+ note.errors.add(:base, _('Discussion to reply to cannot be found'))
+ note
+ end
end
end
diff --git a/app/services/notes/update_service.rb b/app/services/notes/update_service.rb
index 857ffbb6965..76f9b6369b3 100644
--- a/app/services/notes/update_service.rb
+++ b/app/services/notes/update_service.rb
@@ -7,12 +7,7 @@ module Notes
old_mentioned_users = note.mentioned_users(current_user).to_a
- note.assign_attributes(params.merge(updated_by: current_user))
-
- note.with_transaction_returning_status do
- update_confidentiality(note)
- note.save
- end
+ note.assign_attributes(params)
track_note_edit_usage_for_issues(note) if note.for_issue?
track_note_edit_usage_for_merge_requests(note) if note.for_merge_request?
@@ -28,6 +23,15 @@ module Notes
note.note = content
end
+ if note.note_changed?
+ note.assign_attributes(last_edited_at: Time.current, updated_by: current_user)
+ end
+
+ note.with_transaction_returning_status do
+ update_confidentiality(note)
+ note.save
+ end
+
unless only_commands || note.for_personal_snippet?
note.create_new_cross_references!(current_user)
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 50247532f69..fc2eb1dc4e4 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -66,10 +66,10 @@ class NotificationService
# Notify the owner of the personal access token, when it is about to expire
# And mark the token with about_to_expire_delivered
- def access_token_about_to_expire(user)
+ def access_token_about_to_expire(user, token_names)
return unless user.can?(:receive_notifications)
- mailer.access_token_about_to_expire_email(user).deliver_later
+ mailer.access_token_about_to_expire_email(user, token_names).deliver_later
end
# Notify the user when at least one of their personal access tokens has expired today
@@ -95,7 +95,7 @@ class NotificationService
# * users with custom level checked with "new issue"
#
def new_issue(issue, current_user)
- new_resource_email(issue, :new_issue_email)
+ new_resource_email(issue, current_user, :new_issue_email)
end
# When issue text is updated, we should send an email to:
@@ -176,7 +176,7 @@ class NotificationService
#
# In EE, approvers of the merge request are also included
def new_merge_request(merge_request, current_user)
- new_resource_email(merge_request, :new_merge_request_email)
+ new_resource_email(merge_request, current_user, :new_merge_request_email)
end
def push_to_merge_request(merge_request, current_user, new_commits: [], existing_commits: [])
@@ -189,6 +189,20 @@ class NotificationService
end
end
+ def change_in_merge_request_draft_status(merge_request, current_user)
+ recipients = NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: "draft_status_change")
+
+ recipients.each do |recipient|
+ mailer.send(
+ :change_in_merge_request_draft_status_email,
+ recipient.user.id,
+ merge_request.id,
+ current_user.id,
+ recipient.reason
+ ).deliver_later
+ end
+ end
+
# When a merge request is found to be unmergeable, we should send an email to:
#
# * mr author
@@ -355,22 +369,28 @@ class NotificationService
end
def send_service_desk_notification(note)
- return unless Gitlab::ServiceDesk.supported?
return unless note.noteable_type == 'Issue'
issue = note.noteable
- support_bot = User.support_bot
+ recipients = issue.email_participants_emails
+
+ return unless recipients.any?
- return unless issue.external_author.present?
- return unless issue.project.service_desk_enabled?
- return if note.author == support_bot
- return unless issue.subscribed?(support_bot, issue.project)
+ support_bot = User.support_bot
+ recipients.delete(issue.external_author) if note.author == support_bot
- mailer.service_desk_new_note_email(issue.id, note.id).deliver_later
+ recipients.each do |recipient|
+ mailer.service_desk_new_note_email(issue.id, note.id, recipient).deliver_later
+ end
end
# Notify users when a new release is created
def send_new_release_notifications(release)
+ unless release.author&.can_trigger_notifications?
+ warn_skipping_notifications(release.author, release)
+ return false
+ end
+
recipients = NotificationRecipients::BuildService.build_new_release_recipients(release)
recipients.each do |recipient|
@@ -665,7 +685,12 @@ class NotificationService
end
def merge_when_pipeline_succeeds(merge_request, current_user)
- recipients = ::NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: 'merge_when_pipeline_succeeds')
+ recipients = ::NotificationRecipients::BuildService.build_recipients(
+ merge_request,
+ current_user,
+ action: 'merge_when_pipeline_succeeds',
+ custom_action: :merge_when_pipeline_succeeds
+ )
recipients.each do |recipient|
mailer.merge_when_pipeline_succeeds_email(recipient.user.id, merge_request.id, current_user.id).deliver_later
@@ -678,7 +703,12 @@ class NotificationService
protected
- def new_resource_email(target, method)
+ def new_resource_email(target, current_user, method)
+ unless current_user&.can_trigger_notifications?
+ warn_skipping_notifications(current_user, target)
+ return false
+ end
+
recipients = NotificationRecipients::BuildService.build_recipients(target, target.author, action: "new")
recipients.each do |recipient|
@@ -687,6 +717,11 @@ class NotificationService
end
def new_mentions_in_resource_email(target, new_mentioned_users, current_user, method)
+ unless current_user&.can_trigger_notifications?
+ warn_skipping_notifications(current_user, target)
+ return false
+ end
+
recipients = NotificationRecipients::BuildService.build_recipients(target, current_user, action: "new")
recipients = recipients.select {|r| new_mentioned_users.include?(r.user) }
@@ -820,6 +855,10 @@ class NotificationService
source.respond_to?(:group) && source.group
end
+
+ def warn_skipping_notifications(user, object)
+ Gitlab::AppLogger.warn(message: "Skipping sending notifications", user: user.id, klass: object.class, object_id: object.id)
+ end
end
NotificationService.prepend_if_ee('EE::NotificationService')
diff --git a/app/services/onboarding_progress_service.rb b/app/services/onboarding_progress_service.rb
index 241bd8a01ca..6d44c0a61ea 100644
--- a/app/services/onboarding_progress_service.rb
+++ b/app/services/onboarding_progress_service.rb
@@ -1,6 +1,24 @@
# frozen_string_literal: true
class OnboardingProgressService
+ class Async
+ attr_reader :namespace_id
+
+ def initialize(namespace_id)
+ @namespace_id = namespace_id
+ end
+
+ def execute(action:)
+ return unless OnboardingProgress.not_completed?(namespace_id, action)
+
+ Namespaces::OnboardingProgressWorker.perform_async(namespace_id, action)
+ end
+ end
+
+ def self.async(namespace_id)
+ Async.new(namespace_id)
+ end
+
def initialize(namespace)
@namespace = namespace&.root_ancestor
end
diff --git a/app/services/packages/composer/create_package_service.rb b/app/services/packages/composer/create_package_service.rb
index 0f5429f667e..c84d40c3753 100644
--- a/app/services/packages/composer/create_package_service.rb
+++ b/app/services/packages/composer/create_package_service.rb
@@ -17,6 +17,8 @@ module Packages
})
end
+ ::Packages::Composer::CacheUpdateWorker.perform_async(created_package.project_id, created_package.name, nil)
+
created_package
end
diff --git a/app/services/packages/conan/search_service.rb b/app/services/packages/conan/search_service.rb
index 4513616bad2..143fd8a627b 100644
--- a/app/services/packages/conan/search_service.rb
+++ b/app/services/packages/conan/search_service.rb
@@ -44,7 +44,7 @@ module Packages
name, version, username, _ = query.split(/[@\/]/)
full_path = Packages::Conan::Metadatum.full_path_from(package_username: username)
project = Project.find_by_full_path(full_path)
- return unless current_user.can?(:read_package, project)
+ return unless Ability.allowed?(current_user, :read_package, project)
result = project.packages.with_name(name).with_version(version).order_created.last
[result&.conan_recipe].compact
diff --git a/app/services/packages/create_event_service.rb b/app/services/packages/create_event_service.rb
index 63248ef07c9..8fed6e2def8 100644
--- a/app/services/packages/create_event_service.rb
+++ b/app/services/packages/create_event_service.rb
@@ -3,14 +3,12 @@
module Packages
class CreateEventService < BaseService
def execute
- if Feature.enabled?(:collect_package_events_redis, default_enabled: true)
- ::Packages::Event.unique_counters_for(event_scope, event_name, originator_type).each do |event_name|
- ::Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event_name, values: current_user.id)
- end
-
- ::Packages::Event.counters_for(event_scope, event_name, originator_type).each do |event_name|
- ::Gitlab::UsageDataCounters::PackageEventCounter.count(event_name)
- end
+ ::Packages::Event.unique_counters_for(event_scope, event_name, originator_type).each do |event_name|
+ ::Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event_name, values: current_user.id)
+ end
+
+ ::Packages::Event.counters_for(event_scope, event_name, originator_type).each do |event_name|
+ ::Gitlab::UsageDataCounters::PackageEventCounter.count(event_name)
end
if Feature.enabled?(:collect_package_events) && Gitlab::Database.read_write?
diff --git a/app/services/packages/create_temporary_package_service.rb b/app/services/packages/create_temporary_package_service.rb
new file mode 100644
index 00000000000..ee609fd787d
--- /dev/null
+++ b/app/services/packages/create_temporary_package_service.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Packages
+ class CreateTemporaryPackageService < ::Packages::CreatePackageService
+ PACKAGE_VERSION = '0.0.0'
+
+ def execute(package_type, name: 'Temporary.Package')
+ create_package!(package_type,
+ name: name,
+ version: "#{PACKAGE_VERSION}-#{uuid}",
+ status: 'processing'
+ )
+ end
+
+ private
+
+ def uuid
+ SecureRandom.uuid
+ end
+ end
+end
diff --git a/app/services/packages/debian/get_or_create_incoming_service.rb b/app/services/packages/debian/find_or_create_incoming_service.rb
index 09e7877a2b4..2d29ba5f3c3 100644
--- a/app/services/packages/debian/get_or_create_incoming_service.rb
+++ b/app/services/packages/debian/find_or_create_incoming_service.rb
@@ -2,7 +2,7 @@
module Packages
module Debian
- class GetOrCreateIncomingService < ::Packages::CreatePackageService
+ class FindOrCreateIncomingService < ::Packages::CreatePackageService
def execute
find_or_create_package!(:debian, name: 'incoming', version: nil)
end
diff --git a/app/services/packages/debian/find_or_create_package_service.rb b/app/services/packages/debian/find_or_create_package_service.rb
new file mode 100644
index 00000000000..46e06c9f584
--- /dev/null
+++ b/app/services/packages/debian/find_or_create_package_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Packages
+ module Debian
+ class FindOrCreatePackageService < ::Packages::CreatePackageService
+ include Gitlab::Utils::StrongMemoize
+
+ def execute
+ package = project.packages
+ .debian
+ .with_name(params[:name])
+ .with_version(params[:version])
+ .with_debian_codename(params[:distribution_name])
+ .first
+
+ package ||= create_package!(
+ :debian,
+ debian_publication_attributes: { distribution_id: distribution.id }
+ )
+
+ ServiceResponse.success(payload: { package: package })
+ end
+
+ private
+
+ def distribution
+ strong_memoize(:distribution) do
+ Packages::Debian::DistributionsFinder.new(project, codename: params[:distribution_name]).execute.last!
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/find_or_create_package_service.rb b/app/services/packages/maven/find_or_create_package_service.rb
index 4c916d264a7..401e52f7e51 100644
--- a/app/services/packages/maven/find_or_create_package_service.rb
+++ b/app/services/packages/maven/find_or_create_package_service.rb
@@ -2,7 +2,6 @@
module Packages
module Maven
class FindOrCreatePackageService < BaseService
- MAVEN_METADATA_FILE = 'maven-metadata.xml'
SNAPSHOT_TERM = '-SNAPSHOT'
def execute
@@ -33,7 +32,7 @@ module Packages
# - my-company/my-app/maven-metadata.xml
#
# The first upload has to create the proper package (the one with the version set).
- if params[:file_name] == MAVEN_METADATA_FILE && !params[:path]&.ends_with?(SNAPSHOT_TERM)
+ if params[:file_name] == Packages::Maven::Metadata.filename && !params[:path]&.ends_with?(SNAPSHOT_TERM)
package_name, version = params[:path], nil
else
package_name, _, version = params[:path].rpartition('/')
diff --git a/app/services/packages/maven/metadata.rb b/app/services/packages/maven/metadata.rb
new file mode 100644
index 00000000000..437e18e3138
--- /dev/null
+++ b/app/services/packages/maven/metadata.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Packages
+ module Maven
+ module Metadata
+ FILENAME = 'maven-metadata.xml'
+
+ def self.filename
+ FILENAME
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/metadata/append_package_file_service.rb b/app/services/packages/maven/metadata/append_package_file_service.rb
new file mode 100644
index 00000000000..e991576ebc6
--- /dev/null
+++ b/app/services/packages/maven/metadata/append_package_file_service.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module Packages
+ module Maven
+ module Metadata
+ class AppendPackageFileService
+ XML_CONTENT_TYPE = 'application/xml'
+ DEFAULT_CONTENT_TYPE = 'application/octet-stream'
+
+ MD5_FILE_NAME = "#{Metadata.filename}.md5"
+ SHA1_FILE_NAME = "#{Metadata.filename}.sha1"
+ SHA256_FILE_NAME = "#{Metadata.filename}.sha256"
+ SHA512_FILE_NAME = "#{Metadata.filename}.sha512"
+
+ def initialize(package:, metadata_content:)
+ @package = package
+ @metadata_content = metadata_content
+ end
+
+ def execute
+ return ServiceResponse.error(message: 'package is not set') unless @package
+ return ServiceResponse.error(message: 'metadata content is not set') unless @metadata_content
+
+ file_md5 = digest_from(@metadata_content, :md5)
+ file_sha1 = digest_from(@metadata_content, :sha1)
+ file_sha256 = digest_from(@metadata_content, :sha256)
+ file_sha512 = digest_from(@metadata_content, :sha512)
+
+ @package.transaction do
+ append_metadata_file(
+ content: @metadata_content,
+ file_name: Metadata.filename,
+ content_type: XML_CONTENT_TYPE,
+ sha1: file_sha1,
+ md5: file_md5,
+ sha256: file_sha256
+ )
+
+ append_metadata_file(content: file_md5, file_name: MD5_FILE_NAME)
+ append_metadata_file(content: file_sha1, file_name: SHA1_FILE_NAME)
+ append_metadata_file(content: file_sha256, file_name: SHA256_FILE_NAME)
+ append_metadata_file(content: file_sha512, file_name: SHA512_FILE_NAME)
+ end
+
+ ServiceResponse.success(message: 'New metadata package file created')
+ end
+
+ private
+
+ def append_metadata_file(content:, file_name:, content_type: DEFAULT_CONTENT_TYPE, sha1: nil, md5: nil, sha256: nil)
+ file_md5 = md5 || digest_from(content, :md5)
+ file_sha1 = sha1 || digest_from(content, :sha1)
+ file_sha256 = sha256 || digest_from(content, :sha256)
+
+ file = CarrierWaveStringFile.new_file(
+ file_content: content,
+ filename: file_name,
+ content_type: content_type
+ )
+
+ ::Packages::CreatePackageFileService.new(
+ @package,
+ file: file,
+ size: file.size,
+ file_name: file_name,
+ file_sha1: file_sha1,
+ file_md5: file_md5,
+ file_sha256: file_sha256
+ ).execute
+ end
+
+ def digest_from(content, type)
+ digest_class = case type
+ when :md5
+ Digest::MD5
+ when :sha1
+ Digest::SHA1
+ when :sha256
+ Digest::SHA256
+ when :sha512
+ Digest::SHA512
+ end
+ digest_class.hexdigest(content)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/metadata/base_create_xml_service.rb b/app/services/packages/maven/metadata/base_create_xml_service.rb
new file mode 100644
index 00000000000..4d5cab4978e
--- /dev/null
+++ b/app/services/packages/maven/metadata/base_create_xml_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Packages
+ module Maven
+ module Metadata
+ class BaseCreateXmlService
+ include Gitlab::Utils::StrongMemoize
+
+ INDENT_SPACE = 2
+
+ def initialize(metadata_content:, package:)
+ @metadata_content = metadata_content
+ @package = package
+ end
+
+ private
+
+ def xml_doc
+ strong_memoize(:xml_doc) do
+ Nokogiri::XML(@metadata_content) do |config|
+ config.default_xml.noblanks
+ end
+ end
+ end
+
+ def xml_node(name, content)
+ xml_doc.create_element(name).tap { |e| e.content = content }
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/metadata/create_plugins_xml_service.rb b/app/services/packages/maven/metadata/create_plugins_xml_service.rb
new file mode 100644
index 00000000000..707a8c577ba
--- /dev/null
+++ b/app/services/packages/maven/metadata/create_plugins_xml_service.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+module Packages
+ module Maven
+ module Metadata
+ class CreatePluginsXmlService < BaseCreateXmlService
+ XPATH_PLUGIN_ARTIFACT_ID = '//plugin/artifactId'
+ XPATH_PLUGINS = '//metadata/plugins'
+ EMPTY_PLUGINS_PAYLOAD = {
+ changes_exist: true,
+ empty_plugins: true
+ }.freeze
+
+ def execute
+ return ServiceResponse.error(message: 'package not set') unless @package
+ return ServiceResponse.error(message: 'metadata_content not set') unless @metadata_content
+ return ServiceResponse.error(message: 'metadata_content is invalid') unless plugins_xml_node.present?
+ return ServiceResponse.success(payload: EMPTY_PLUGINS_PAYLOAD) if plugin_artifact_ids_from_database.empty?
+
+ changes_exist = update_plugins_list
+
+ payload = { changes_exist: changes_exist, empty_plugins: false }
+ payload[:metadata_content] = xml_doc.to_xml(indent: INDENT_SPACE) if changes_exist
+
+ ServiceResponse.success(payload: payload)
+ end
+
+ private
+
+ def update_plugins_list
+ return false if plugin_artifact_ids_from_xml == plugin_artifact_ids_from_database
+
+ plugins_xml_node.children.remove
+
+ plugin_artifact_ids_from_database.each do |artifact_id|
+ plugins_xml_node.add_child(plugin_node_for(artifact_id))
+ end
+
+ true
+ end
+
+ def plugins_xml_node
+ strong_memoize(:plugins_xml_node) do
+ xml_doc.xpath(XPATH_PLUGINS)
+ .first
+ end
+ end
+
+ def plugin_artifact_ids_from_xml
+ strong_memoize(:plugin_artifact_ids_from_xml) do
+ plugins_xml_node.xpath(XPATH_PLUGIN_ARTIFACT_ID)
+ .map(&:content)
+ end
+ end
+
+ def plugin_artifact_ids_from_database
+ strong_memoize(:plugin_artifact_ids_from_database) do
+ package_names = plugin_artifact_ids_from_xml.map do |artifact_id|
+ "#{@package.name}/#{artifact_id}"
+ end
+
+ packages = @package.project.packages
+ .maven
+ .displayable
+ .with_name(package_names)
+ .has_version
+
+ ::Packages::Maven::Metadatum.for_package_ids(packages.select(:id))
+ .order_created
+ .pluck_app_name
+ .uniq
+ end
+ end
+
+ def plugin_node_for(artifact_id)
+ xml_doc.create_element('plugin').tap do |plugin_node|
+ plugin_node.add_child(xml_node('name', artifact_id))
+ plugin_node.add_child(xml_node('prefix', prefix_from(artifact_id)))
+ plugin_node.add_child(xml_node('artifactId', artifact_id))
+ end
+ end
+
+ # Maven plugin prefix generation from
+ # https://github.com/apache/maven/blob/c3dba0e5ba71ee7cbd62620f669a8c206e71b5e2/maven-plugin-api/src/main/java/org/apache/maven/plugin/descriptor/PluginDescriptor.java#L189
+ def prefix_from(artifact_id)
+ artifact_id.gsub(/-?maven-?/, '')
+ .gsub(/-?plugin-?/, '')
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/metadata/create_versions_xml_service.rb b/app/services/packages/maven/metadata/create_versions_xml_service.rb
new file mode 100644
index 00000000000..13b6efa8650
--- /dev/null
+++ b/app/services/packages/maven/metadata/create_versions_xml_service.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+module Packages
+ module Maven
+ module Metadata
+ class CreateVersionsXmlService < BaseCreateXmlService
+ XPATH_VERSIONING = '//metadata/versioning'
+ XPATH_VERSIONS = '//versions'
+ XPATH_VERSION = '//version'
+ XPATH_LATEST = '//latest'
+ XPATH_RELEASE = '//release'
+ XPATH_LAST_UPDATED = '//lastUpdated'
+
+ EMPTY_VERSIONS_PAYLOAD = {
+ changes_exist: true,
+ empty_versions: true
+ }.freeze
+
+ def execute
+ return ServiceResponse.error(message: 'package not set') unless @package
+ return ServiceResponse.error(message: 'metadata_content not set') unless @metadata_content
+ return ServiceResponse.error(message: 'metadata_content is invalid') unless valid_metadata_content?
+ return ServiceResponse.success(payload: EMPTY_VERSIONS_PAYLOAD) if versions_from_database.empty?
+
+ changes_exist = false
+ changes_exist = true if update_versions_list
+ changes_exist = true if update_latest
+ changes_exist = true if update_release
+ update_last_updated_timestamp if changes_exist
+
+ payload = { changes_exist: changes_exist, empty_versions: false }
+ payload[:metadata_content] = xml_doc.to_xml(indent: INDENT_SPACE) if changes_exist
+
+ ServiceResponse.success(payload: payload)
+ end
+
+ private
+
+ def valid_metadata_content?
+ versioning_xml_node.present? &&
+ versions_xml_node.present? &&
+ last_updated_xml_node.present?
+ end
+
+ def update_versions_list
+ return false if versions_from_xml == versions_from_database
+
+ version_xml_nodes.remove
+
+ versions_from_database.each do |version|
+ versions_xml_node.add_child(xml_node('version', version))
+ end
+ true
+ end
+
+ def update_latest
+ return false if latest_coherent?
+
+ latest_xml_node.content = latest_from_database
+ true
+ end
+
+ def latest_coherent?
+ latest_from_xml.nil? || latest_from_xml == latest_from_database
+ end
+
+ def update_release
+ return false if release_coherent?
+
+ if release_from_database
+ release_xml_node.content = release_from_database
+ else
+ release_xml_node.remove
+ end
+
+ true
+ end
+
+ def release_coherent?
+ release_from_xml == release_from_database
+ end
+
+ def update_last_updated_timestamp
+ last_updated_xml_node.content = Time.zone.now.strftime('%Y%m%d%H%M%S')
+ end
+
+ def versioning_xml_node
+ strong_memoize(:versioning_xml_node) do
+ xml_doc.xpath(XPATH_VERSIONING).first
+ end
+ end
+
+ def versions_xml_node
+ strong_memoize(:versions_xml_node) do
+ versioning_xml_node&.xpath(XPATH_VERSIONS)
+ &.first
+ end
+ end
+
+ def version_xml_nodes
+ versions_xml_node&.xpath(XPATH_VERSION)
+ end
+
+ def latest_xml_node
+ strong_memoize(:latest_xml_node) do
+ versioning_xml_node&.xpath(XPATH_LATEST)
+ &.first
+ end
+ end
+
+ def release_xml_node
+ strong_memoize(:release_xml_node) do
+ versioning_xml_node&.xpath(XPATH_RELEASE)
+ &.first
+ end
+ end
+
+ def last_updated_xml_node
+ strong_memoize(:last_updated_xml_node) do
+ versioning_xml_node.xpath(XPATH_LAST_UPDATED)
+ .first
+ end
+ end
+
+ def versions_from_xml
+ strong_memoize(:versions_from_xml) do
+ versions_xml_node.xpath(XPATH_VERSION)
+ .map(&:text)
+ end
+ end
+
+ def latest_from_xml
+ latest_xml_node&.text
+ end
+
+ def release_from_xml
+ release_xml_node&.text
+ end
+
+ def versions_from_database
+ strong_memoize(:versions_from_database) do
+ @package.project.packages
+ .maven
+ .displayable
+ .with_name(@package.name)
+ .has_version
+ .order_created
+ .pluck_versions
+ end
+ end
+
+ def latest_from_database
+ versions_from_database.last
+ end
+
+ def release_from_database
+ strong_memoize(:release_from_database) do
+ non_snapshot_versions_from_database = versions_from_database.reject { |v| v.ends_with?('SNAPSHOT') }
+ non_snapshot_versions_from_database.last
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/metadata/sync_service.rb b/app/services/packages/maven/metadata/sync_service.rb
new file mode 100644
index 00000000000..a6534aa706d
--- /dev/null
+++ b/app/services/packages/maven/metadata/sync_service.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+module Packages
+ module Maven
+ module Metadata
+ class SyncService < BaseContainerService
+ include Gitlab::Utils::StrongMemoize
+
+ alias_method :project, :container
+
+ MAX_FILE_SIZE = 10.megabytes.freeze
+
+ def execute
+ return error('Blank package name') unless package_name
+ return error('Not allowed') unless Ability.allowed?(current_user, :destroy_package, project)
+ return error('Non existing versionless package') unless versionless_package_for_versions
+ return error('Non existing metadata file for versions') unless metadata_package_file_for_versions
+
+ if metadata_package_file_for_plugins
+ result = update_plugins_xml
+
+ return result if result.error?
+ end
+
+ update_versions_xml
+ end
+
+ private
+
+ def update_versions_xml
+ update_xml(
+ kind: :versions,
+ package_file: metadata_package_file_for_versions,
+ service_class: CreateVersionsXmlService,
+ payload_empty_field: :empty_versions
+ )
+ end
+
+ def update_plugins_xml
+ update_xml(
+ kind: :plugins,
+ package_file: metadata_package_file_for_plugins,
+ service_class: CreatePluginsXmlService,
+ payload_empty_field: :empty_plugins
+ )
+ end
+
+ def update_xml(kind:, package_file:, service_class:, payload_empty_field:)
+ return error("Metadata file for #{kind} is too big") if package_file.size > MAX_FILE_SIZE
+
+ package_file.file.use_open_file do |file|
+ result = service_class.new(metadata_content: file, package: package_file.package)
+ .execute
+
+ next result unless result.success?
+ next success("No changes for #{kind} xml") unless result.payload[:changes_exist]
+
+ if result.payload[payload_empty_field]
+ package_file.package.destroy!
+ success("Versionless package for #{kind} destroyed")
+ else
+ AppendPackageFileService.new(metadata_content: result.payload[:metadata_content], package: package_file.package)
+ .execute
+ end
+ end
+ end
+
+ def metadata_package_file_for_versions
+ strong_memoize(:metadata_file_for_versions) do
+ metadata_package_file_for(versionless_package_for_versions)
+ end
+ end
+
+ def versionless_package_for_versions
+ strong_memoize(:versionless_package_for_versions) do
+ versionless_package_named(package_name)
+ end
+ end
+
+ def metadata_package_file_for_plugins
+ strong_memoize(:metadata_package_file_for_plugins) do
+ metadata_package_file_for(versionless_package_named(package_name_for_plugins))
+ end
+ end
+
+ def metadata_package_file_for(package)
+ return unless package
+
+ package.package_files
+ .with_file_name(Metadata.filename)
+ .recent
+ .first
+ end
+
+ def versionless_package_named(name)
+ project.packages
+ .maven
+ .displayable
+ .with_name(name)
+ .with_version(nil)
+ .first
+ end
+
+ def package_name
+ params[:package_name]
+ end
+
+ def package_name_for_plugins
+ group = versionless_package_for_versions.maven_metadatum.app_group
+ group.tr('.', '/')
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def success(message)
+ ServiceResponse.success(message: message)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/nuget/create_package_service.rb b/app/services/packages/nuget/create_package_service.rb
deleted file mode 100644
index 3999ccd3347..00000000000
--- a/app/services/packages/nuget/create_package_service.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-module Packages
- module Nuget
- class CreatePackageService < ::Packages::CreatePackageService
- TEMPORARY_PACKAGE_NAME = 'NuGet.Temporary.Package'
- PACKAGE_VERSION = '0.0.0'
-
- def execute
- create_package!(:nuget,
- name: TEMPORARY_PACKAGE_NAME,
- version: "#{PACKAGE_VERSION}-#{uuid}"
- )
- end
-
- private
-
- def uuid
- SecureRandom.uuid
- end
- end
- end
-end
diff --git a/app/services/packages/nuget/update_package_from_metadata_service.rb b/app/services/packages/nuget/update_package_from_metadata_service.rb
index 0109ee23c49..1bcab00bd92 100644
--- a/app/services/packages/nuget/update_package_from_metadata_service.rb
+++ b/app/services/packages/nuget/update_package_from_metadata_service.rb
@@ -68,7 +68,8 @@ module Packages
def update_linked_package
@package_file.package.update!(
name: package_name,
- version: package_version
+ version: package_version,
+ status: :default
)
::Packages::Nuget::CreateDependencyService.new(@package_file.package, package_dependencies)
diff --git a/app/services/packages/rubygems/dependency_resolver_service.rb b/app/services/packages/rubygems/dependency_resolver_service.rb
new file mode 100644
index 00000000000..c44b26e2b92
--- /dev/null
+++ b/app/services/packages/rubygems/dependency_resolver_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Packages
+ module Rubygems
+ class DependencyResolverService < BaseService
+ include Gitlab::Utils::StrongMemoize
+
+ DEFAULT_PLATFORM = 'ruby'
+
+ def execute
+ return ServiceResponse.error(message: "forbidden", http_status: :forbidden) unless Ability.allowed?(current_user, :read_package, project)
+ return ServiceResponse.error(message: "#{gem_name} not found", http_status: :not_found) if packages.empty?
+
+ payload = packages.map do |package|
+ dependencies = package.dependency_links.map do |link|
+ [link.dependency.name, link.dependency.version_pattern]
+ end
+
+ {
+ name: gem_name,
+ number: package.version,
+ platform: DEFAULT_PLATFORM,
+ dependencies: dependencies
+ }
+ end
+
+ ServiceResponse.success(payload: payload)
+ end
+
+ private
+
+ def packages
+ strong_memoize(:packages) do
+ project.packages.with_name(gem_name)
+ end
+ end
+
+ def gem_name
+ params[:gem_name]
+ end
+ end
+ end
+end
diff --git a/app/services/pages/legacy_storage_lease.rb b/app/services/pages/legacy_storage_lease.rb
index 3f42fc8c63b..1849def0183 100644
--- a/app/services/pages/legacy_storage_lease.rb
+++ b/app/services/pages/legacy_storage_lease.rb
@@ -8,15 +8,6 @@ module Pages
LEASE_TIMEOUT = 1.hour
- # override method from exclusive lease guard to guard it by feature flag
- # TODO: just remove this method after testing this in production
- # https://gitlab.com/gitlab-org/gitlab/-/issues/282464
- def try_obtain_lease
- return yield unless Feature.enabled?(:pages_use_legacy_storage_lease, project, default_enabled: true)
-
- super
- end
-
def lease_key
"pages_legacy_storage:#{project.id}"
end
diff --git a/app/services/projects/autocomplete_service.rb b/app/services/projects/autocomplete_service.rb
index 53bd954eab6..68086f636b7 100644
--- a/app/services/projects/autocomplete_service.rb
+++ b/app/services/projects/autocomplete_service.rb
@@ -16,7 +16,7 @@ module Projects
finder_params[:group_ids] = @project.group.self_and_ancestors.select(:id) if @project.group
- MilestonesFinder.new(finder_params).execute.select([:iid, :title])
+ MilestonesFinder.new(finder_params).execute.select([:iid, :title, :due_date])
end
def merge_requests
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 08f569662a8..e3b1fd5f4c0 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -19,6 +19,8 @@ module Projects
@project = Project.new(params)
+ @project.visibility_level = @project.group.visibility_level unless @project.visibility_level_allowed_by_group?
+
# If a project is newly created it should have shared runners settings
# based on its group having it enabled. This is like the "default value"
@project.shared_runners_enabled = false if !params.key?(:shared_runners_enabled) && @project.group && @project.group.shared_runners_setting != 'enabled'
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index c1501625300..6840c395a76 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -107,12 +107,7 @@ module Projects
end
project.leave_pool_repository
-
- if Gitlab::Ci::Features.project_transactionless_destroy?(project)
- destroy_project_related_records(project)
- else
- Project.transaction { destroy_project_related_records(project) }
- end
+ destroy_project_related_records(project)
end
def destroy_project_related_records(project)
diff --git a/app/services/projects/group_links/create_service.rb b/app/services/projects/group_links/create_service.rb
index 3fcc721fe65..3262839e246 100644
--- a/app/services/projects/group_links/create_service.rb
+++ b/app/services/projects/group_links/create_service.rb
@@ -23,7 +23,7 @@ module Projects
private
def setup_authorizations(group, group_access = nil)
- if Feature.enabled?(:specialized_project_authorization_project_share_worker)
+ if Feature.enabled?(:specialized_project_authorization_project_share_worker, default_enabled: :yaml)
AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(
project.id, group.id, group_access)
diff --git a/app/services/projects/schedule_bulk_repository_shard_moves_service.rb b/app/services/projects/schedule_bulk_repository_shard_moves_service.rb
index 53de9abdb59..98fc2e22967 100644
--- a/app/services/projects/schedule_bulk_repository_shard_moves_service.rb
+++ b/app/services/projects/schedule_bulk_repository_shard_moves_service.rb
@@ -25,7 +25,7 @@ module Projects
override :schedule_bulk_worker_klass
def self.schedule_bulk_worker_klass
- ::ProjectScheduleBulkRepositoryShardMovesWorker
+ ::Projects::ScheduleBulkRepositoryShardMovesWorker
end
end
end
diff --git a/app/services/projects/update_pages_configuration_service.rb b/app/services/projects/update_pages_configuration_service.rb
index 67d388dc8a3..01539d58545 100644
--- a/app/services/projects/update_pages_configuration_service.rb
+++ b/app/services/projects/update_pages_configuration_service.rb
@@ -11,6 +11,8 @@ module Projects
end
def execute
+ return success unless Feature.enabled?(:pages_update_legacy_storage, default_enabled: true)
+
# If the pages were never deployed, we can't write out the config, as the
# directory would not exist.
# https://gitlab.com/gitlab-org/gitlab/-/issues/235139
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 29e92d725e2..2b59fdd539d 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -33,6 +33,7 @@ module Projects
@status = create_status
@status.enqueue!
@status.run!
+ @status.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, project, default_enabled: :yaml)
raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
diff --git a/app/services/protected_branches/api_service.rb b/app/services/protected_branches/api_service.rb
index ac4917d6590..bf1a966472b 100644
--- a/app/services/protected_branches/api_service.rb
+++ b/app/services/protected_branches/api_service.rb
@@ -9,10 +9,15 @@ module ProtectedBranches
def protected_branch_params
{
name: params[:name],
+ allow_force_push: allow_force_push?,
push_access_levels_attributes: AccessLevelParams.new(:push, params).access_levels,
merge_access_levels_attributes: AccessLevelParams.new(:merge, params).access_levels
}
end
+
+ def allow_force_push?
+ params[:allow_force_push] || false
+ end
end
end
diff --git a/app/services/repositories/changelog_service.rb b/app/services/repositories/changelog_service.rb
index 96a63865a49..3981e91e7f3 100644
--- a/app/services/repositories/changelog_service.rb
+++ b/app/services/repositories/changelog_service.rb
@@ -39,10 +39,10 @@ module Repositories
project,
user,
version:,
- to:,
+ branch: project.default_branch_or_master,
from: nil,
+ to: branch,
date: DateTime.now,
- branch: project.default_branch_or_master,
trailer: DEFAULT_TRAILER,
file: DEFAULT_FILE,
message: "Add changelog for version #{version}"
@@ -73,7 +73,7 @@ module Repositories
.new(version: @version, date: @date, config: config)
commits =
- CommitsWithTrailerFinder.new(project: @project, from: from, to: @to)
+ ChangelogCommitsFinder.new(project: @project, from: from, to: @to)
commits.each_page(@trailer) do |page|
mrs = mrs_finder.execute(page)
diff --git a/app/services/security/vulnerability_uuid.rb b/app/services/security/vulnerability_uuid.rb
new file mode 100644
index 00000000000..3eab0f3dad6
--- /dev/null
+++ b/app/services/security/vulnerability_uuid.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module Security
+ class VulnerabilityUUID
+ def self.generate(report_type:, primary_identifier_fingerprint:, location_fingerprint:, project_id:)
+ Gitlab::UUID.v5("#{report_type}-#{primary_identifier_fingerprint}-#{location_fingerprint}-#{project_id}")
+ end
+ end
+end
diff --git a/app/services/snippets/schedule_bulk_repository_shard_moves_service.rb b/app/services/snippets/schedule_bulk_repository_shard_moves_service.rb
index f7bdd0a99a5..09eac8e6cda 100644
--- a/app/services/snippets/schedule_bulk_repository_shard_moves_service.rb
+++ b/app/services/snippets/schedule_bulk_repository_shard_moves_service.rb
@@ -25,7 +25,7 @@ module Snippets
override :schedule_bulk_worker_klass
def self.schedule_bulk_worker_klass
- ::SnippetScheduleBulkRepositoryShardMovesWorker
+ ::Snippets::ScheduleBulkRepositoryShardMovesWorker
end
end
end
diff --git a/app/services/spam/spam_action_service.rb b/app/services/spam/spam_action_service.rb
index ff32bc32d93..185b9e39070 100644
--- a/app/services/spam/spam_action_service.rb
+++ b/app/services/spam/spam_action_service.rb
@@ -9,7 +9,9 @@ module Spam
# after the spammable is created/updated based on the remaining parameters.
#
# Takes a hash of parameters from an incoming request to modify a model (via a controller,
- # service, or GraphQL mutation).
+ # service, or GraphQL mutation). The parameters will either be camelCase (if they are
+ # received directly via controller params) or underscore_case (if they have come from
+ # a GraphQL mutation which has converted them to underscore)
#
# Deletes the parameters which are related to spam and captcha processing, and returns
# them in a SpamParams parameters object. See:
@@ -18,12 +20,12 @@ module Spam
# NOTE: The 'captcha_response' field can be expanded to multiple fields when we move to future
# alternative captcha implementations such as FriendlyCaptcha. See
# https://gitlab.com/gitlab-org/gitlab/-/issues/273480
- captcha_response = params.delete(:captcha_response)
+ captcha_response = params.delete(:captcha_response) || params.delete(:captchaResponse)
SpamParams.new(
api: params.delete(:api),
captcha_response: captcha_response,
- spam_log_id: params.delete(:spam_log_id)
+ spam_log_id: params.delete(:spam_log_id) || params.delete(:spamLogId)
)
end
diff --git a/app/services/system_hooks_service.rb b/app/services/system_hooks_service.rb
index 5273dedb56f..d854b95cb93 100644
--- a/app/services/system_hooks_service.rb
+++ b/app/services/system_hooks_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class SystemHooksService
- BUILDER_DRIVEN_EVENT_DATA_AVAILABLE_FOR_CLASSES = [GroupMember, Group].freeze
+ BUILDER_DRIVEN_EVENT_DATA_AVAILABLE_FOR_CLASSES = [GroupMember, Group, ProjectMember].freeze
def execute_hooks_for(model, event)
data = build_event_data(model, event)
@@ -56,22 +56,13 @@ class SystemHooksService
when :failed_login
data[:state] = model.state
end
- when ProjectMember
- data.merge!(project_member_data(model))
end
data
end
def build_event_name(model, event)
- case model
- when ProjectMember
- return "user_add_to_team" if event == :create
- return "user_remove_from_team" if event == :destroy
- return "user_update_for_team" if event == :update
- else
- "#{model.class.name.downcase}_#{event}"
- end
+ "#{model.class.name.downcase}_#{event}"
end
def project_data(model)
@@ -88,23 +79,6 @@ class SystemHooksService
}
end
- def project_member_data(model)
- project = model.project || Project.unscoped.find(model.source_id)
-
- {
- project_name: project.name,
- project_path: project.path,
- project_path_with_namespace: project.full_path,
- project_id: project.id,
- user_username: model.user.username,
- user_name: model.user.name,
- user_email: model.user.email,
- user_id: model.user.id,
- access_level: model.human_access,
- project_visibility: Project.visibility_levels.key(project.visibility_level_value).downcase
- }
- end
-
def user_data(model)
{
name: model.name,
@@ -124,6 +98,8 @@ class SystemHooksService
Gitlab::HookData::GroupMemberBuilder
when Group
Gitlab::HookData::GroupBuilder
+ when ProjectMember
+ Gitlab::HookData::ProjectMemberBuilder
end
builder_class.new(model).build(event)
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 7d654ca7f5b..082ed93eca2 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -175,7 +175,7 @@ module SystemNoteService
# Example Note text is based on event_type:
#
# update: "changed target branch from `Old` to `New`"
- # delete: "changed automatically target branch to `New` because `Old` was deleted"
+ # delete: "deleted the `Old` branch. This merge request now targets the `New` branch"
#
# Returns the created Note object
def change_branch(noteable, project, author, branch_type, event_type, old_branch, new_branch)
@@ -241,6 +241,10 @@ module SystemNoteService
::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).mark_canonical_issue_of_duplicate(duplicate_issue)
end
+ def add_email_participants(noteable, project, author, body)
+ ::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).add_email_participants(body)
+ end
+
def discussion_lock(issuable, author)
::SystemNotes::IssuablesService.new(noteable: issuable, project: issuable.project, author: author).discussion_lock
end
@@ -323,6 +327,10 @@ module SystemNoteService
::SystemNotes::IncidentService.new(noteable: incident, project: incident.project, author: author).change_incident_severity
end
+ def log_resolving_alert(alert, monitoring_tool)
+ ::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project).log_resolving_alert(monitoring_tool)
+ end
+
private
def merge_requests_service(noteable, project, author)
diff --git a/app/services/system_notes/alert_management_service.rb b/app/services/system_notes/alert_management_service.rb
index 376f2c1cfbf..27ddf2e36f1 100644
--- a/app/services/system_notes/alert_management_service.rb
+++ b/app/services/system_notes/alert_management_service.rb
@@ -62,5 +62,20 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'status'))
end
+
+ # Called when an alert is resolved due to received resolving alert payload
+ #
+ # monitoring_tool - name of the monitoring tool that sent the resolving alert payload.
+ #
+ # Example Note text:
+ #
+ # "logged a resolving alert from **Sentry**"
+ #
+ # Returns the created Note object
+ def log_resolving_alert(monitoring_tool)
+ body = "logged a resolving alert from **#{monitoring_tool}**"
+
+ create_note(NoteSummary.new(noteable, project, User.alert_bot, body, action: 'new_alert_added'))
+ end
end
end
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index b344b240a07..60dd56e772a 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -125,8 +125,8 @@ module SystemNotes
old_diffs, new_diffs = Gitlab::Diff::InlineDiff.new(old_title, new_title).inline_diffs
- marked_old_title = Gitlab::Diff::InlineDiffMarkdownMarker.new(old_title).mark(old_diffs, mode: :deletion)
- marked_new_title = Gitlab::Diff::InlineDiffMarkdownMarker.new(new_title).mark(new_diffs, mode: :addition)
+ marked_old_title = Gitlab::Diff::InlineDiffMarkdownMarker.new(old_title).mark(old_diffs)
+ marked_new_title = Gitlab::Diff::InlineDiffMarkdownMarker.new(new_title).mark(new_diffs)
body = "changed title from **#{marked_old_title}** to **#{marked_new_title}**"
@@ -354,6 +354,10 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'duplicate'))
end
+ def add_email_participants(body)
+ create_note(NoteSummary.new(noteable, project, author, body))
+ end
+
def discussion_lock
action = noteable.discussion_locked? ? 'locked' : 'unlocked'
body = "#{action} this #{noteable.class.to_s.titleize.downcase}"
diff --git a/app/services/system_notes/merge_requests_service.rb b/app/services/system_notes/merge_requests_service.rb
index 99e03e67bf1..546a23c95c2 100644
--- a/app/services/system_notes/merge_requests_service.rb
+++ b/app/services/system_notes/merge_requests_service.rb
@@ -90,14 +90,14 @@ module SystemNotes
# Example Note text is based on event_type:
#
# update: "changed target branch from `Old` to `New`"
- # delete: "changed automatically target branch to `New` because `Old` was deleted"
+ # delete: "deleted the `Old` branch. This merge request now targets the `New` branch"
#
# Returns the created Note object
def change_branch(branch_type, event_type, old_branch, new_branch)
body =
case event_type.to_s
when 'delete'
- "changed automatically #{branch_type} branch to `#{new_branch}` because `#{old_branch}` was deleted"
+ "deleted the `#{old_branch}` branch. This merge request now targets the `#{new_branch}` branch"
when 'update'
"changed #{branch_type} branch from `#{old_branch}` to `#{new_branch}`"
else
diff --git a/app/services/terraform/remote_state_handler.rb b/app/services/terraform/remote_state_handler.rb
index 9500a821071..db47bc024ba 100644
--- a/app/services/terraform/remote_state_handler.rb
+++ b/app/services/terraform/remote_state_handler.rb
@@ -60,7 +60,7 @@ module Terraform
private
def retrieve_with_lock(find_only: false)
- create_or_find!(find_only: find_only).tap { |state| retry_optimistic_lock(state) { |state| yield state } }
+ create_or_find!(find_only: find_only).tap { |state| retry_optimistic_lock(state, name: 'terraform_remote_state_handler_retrieve') { |state| yield state } }
end
def create_or_find!(find_only:)
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index e3f02bf85f0..b3b172f9df2 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -83,7 +83,8 @@ module Users
:location,
:public_email,
:user_type,
- :note
+ :note,
+ :view_diffs_file_by_file
]
end
diff --git a/app/services/users/dismiss_user_callout_service.rb b/app/services/users/dismiss_user_callout_service.rb
new file mode 100644
index 00000000000..f05c44186bb
--- /dev/null
+++ b/app/services/users/dismiss_user_callout_service.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Users
+ class DismissUserCalloutService < BaseContainerService
+ def execute
+ current_user.find_or_initialize_callout(params[:feature_name]).tap do |callout|
+ callout.update(dismissed_at: Time.current) if callout.valid?
+ end
+ end
+ end
+end
diff --git a/app/services/users/refresh_authorized_projects_service.rb b/app/services/users/refresh_authorized_projects_service.rb
index 24e3fb73370..070713929e4 100644
--- a/app/services/users/refresh_authorized_projects_service.rb
+++ b/app/services/users/refresh_authorized_projects_service.rb
@@ -92,7 +92,7 @@ module Users
# remove - The IDs of the authorization rows to remove.
# add - Rows to insert in the form `[user id, project id, access level]`
def update_authorizations(remove = [], add = [])
- log_refresh_details(remove.length, add.length)
+ log_refresh_details(remove, add)
User.transaction do
user.remove_project_authorizations(remove) unless remove.empty?
@@ -104,11 +104,16 @@ module Users
user.reset
end
- def log_refresh_details(rows_deleted, rows_added)
+ def log_refresh_details(remove, add)
Gitlab::AppJsonLogger.info(event: 'authorized_projects_refresh',
+ user_id: user.id,
'authorized_projects_refresh.source': source,
- 'authorized_projects_refresh.rows_deleted': rows_deleted,
- 'authorized_projects_refresh.rows_added': rows_added)
+ 'authorized_projects_refresh.rows_deleted_count': remove.length,
+ 'authorized_projects_refresh.rows_added_count': add.length,
+ # most often there's only a few entries in remove and add, but limit it to the first 5
+ # entries to avoid flooding the logs
+ 'authorized_projects_refresh.rows_deleted_slice': remove.first(5),
+ 'authorized_projects_refresh.rows_added_slice': add.first(5))
end
def fresh_access_levels_per_project