gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2021-08-19 12:08:42 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-08-19 12:08:42 +0300
commit     b76ae638462ab0f673e5915986070518dd3f9ad3 (patch)
tree       bdab0533383b52873be0ec0eb4d3c66598ff8b91 /app/services
parent     434373eabe7b4be9593d18a585fb763f1e5f1a6f (diff)

Add latest changes from gitlab-org/gitlab@14-2-stable-ee (tag: v14.2.0-rc42)
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/admin/propagate_service_template.rb | 4
-rw-r--r--  app/services/auth/container_registry_authentication_service.rb | 22
-rw-r--r--  app/services/auth/dependency_proxy_authentication_service.rb | 20
-rw-r--r--  app/services/authorized_project_update/project_recalculate_per_user_service.rb | 22
-rw-r--r--  app/services/authorized_project_update/project_recalculate_service.rb | 13
-rw-r--r--  app/services/auto_merge/base_service.rb | 6
-rw-r--r--  app/services/boards/base_item_move_service.rb | 14
-rw-r--r--  app/services/boards/issues/list_service.rb | 2
-rw-r--r--  app/services/bulk_update_integration_service.rb | 13
-rw-r--r--  app/services/ci/after_requeue_job_service.rb | 2
-rw-r--r--  app/services/ci/append_build_trace_service.rb | 3
-rw-r--r--  app/services/ci/build_cancel_service.rb | 35
-rw-r--r--  app/services/ci/build_unschedule_service.rb | 35
-rw-r--r--  app/services/ci/create_downstream_pipeline_service.rb | 5
-rw-r--r--  app/services/ci/create_pipeline_service.rb | 18
-rw-r--r--  app/services/ci/daily_build_group_report_result_service.rb | 8
-rw-r--r--  app/services/ci/delete_unit_tests_service.rb | 2
-rw-r--r--  app/services/ci/destroy_pipeline_service.rb | 2
-rw-r--r--  app/services/ci/drop_pipeline_service.rb | 13
-rw-r--r--  app/services/ci/external_pull_requests/create_pipeline_service.rb | 11
-rw-r--r--  app/services/ci/extract_sections_from_build_trace_service.rb | 34
-rw-r--r--  app/services/ci/pipeline_trigger_service.rb | 8
-rw-r--r--  app/services/ci/pipelines/add_job_service.rb | 30
-rw-r--r--  app/services/ci/queue/builds_table_strategy.rb | 8
-rw-r--r--  app/services/ci/queue/pending_builds_strategy.rb | 58
-rw-r--r--  app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb | 10
-rw-r--r--  app/services/ci/unlock_artifacts_service.rb | 2
-rw-r--r--  app/services/dependency_proxy/find_or_create_blob_service.rb | 4
-rw-r--r--  app/services/dependency_proxy/find_or_create_manifest_service.rb | 8
-rw-r--r--  app/services/dependency_proxy/pull_manifest_service.rb | 2
-rw-r--r--  app/services/deployments/update_environment_service.rb | 2
-rw-r--r--  app/services/design_management/copy_design_collection/copy_service.rb | 14
-rw-r--r--  app/services/draft_notes/publish_service.rb | 30
-rw-r--r--  app/services/environments/auto_stop_service.rb | 2
-rw-r--r--  app/services/environments/stop_service.rb (renamed from app/services/ci/stop_environments_service.rb) | 23
-rw-r--r--  app/services/error_tracking/issue_details_service.rb | 26
-rw-r--r--  app/services/error_tracking/issue_latest_event_service.rb | 27
-rw-r--r--  app/services/error_tracking/issue_update_service.rb | 32
-rw-r--r--  app/services/error_tracking/list_issues_service.rb | 37
-rw-r--r--  app/services/feature_flags/create_service.rb | 2
-rw-r--r--  app/services/feature_flags/destroy_service.rb | 2
-rw-r--r--  app/services/feature_flags/update_service.rb | 2
-rw-r--r--  app/services/git/base_hooks_service.rb | 5
-rw-r--r--  app/services/git/branch_hooks_service.rb | 7
-rw-r--r--  app/services/git/branch_push_service.rb | 2
-rw-r--r--  app/services/git/process_ref_changes_service.rb | 6
-rw-r--r--  app/services/groups/transfer_service.rb | 4
-rw-r--r--  app/services/import/github_service.rb | 2
-rw-r--r--  app/services/issuable/clone/attributes_rewriter.rb | 2
-rw-r--r--  app/services/issuable/clone/base_service.rb | 2
-rw-r--r--  app/services/issuable/common_system_notes_service.rb | 2
-rw-r--r--  app/services/issuable/destroy_label_links_service.rb | 2
-rw-r--r--  app/services/issuable_base_service.rb | 6
-rw-r--r--  app/services/issues/close_service.rb | 8
-rw-r--r--  app/services/issues/create_service.rb | 5
-rw-r--r--  app/services/issues/reopen_service.rb | 6
-rw-r--r--  app/services/jira/requests/base.rb | 80
-rw-r--r--  app/services/members/create_service.rb | 15
-rw-r--r--  app/services/members/import_project_team_service.rb | 37
-rw-r--r--  app/services/merge_requests/base_service.rb | 4
-rw-r--r--  app/services/merge_requests/conflicts/base_service.rb | 5
-rw-r--r--  app/services/merge_requests/conflicts/list_service.rb | 6
-rw-r--r--  app/services/merge_requests/create_pipeline_service.rb | 6
-rw-r--r--  app/services/merge_requests/merge_to_ref_service.rb | 14
-rw-r--r--  app/services/merge_requests/push_options_handler_service.rb | 2
-rw-r--r--  app/services/namespaces/in_product_marketing_emails_service.rb | 27
-rw-r--r--  app/services/notes/create_service.rb | 8
-rw-r--r--  app/services/packages/composer/create_package_service.rb | 4
-rw-r--r--  app/services/packages/create_dependency_service.rb | 4
-rw-r--r--  app/services/packages/debian/generate_distribution_key_service.rb | 61
-rw-r--r--  app/services/packages/debian/generate_distribution_service.rb | 95
-rw-r--r--  app/services/packages/debian/sign_distribution_service.rb | 38
-rw-r--r--  app/services/packages/go/create_package_service.rb | 2
-rw-r--r--  app/services/packages/npm/create_package_service.rb | 2
-rw-r--r--  app/services/packages/nuget/create_dependency_service.rb | 2
-rw-r--r--  app/services/packages/nuget/update_package_from_metadata_service.rb | 63
-rw-r--r--  app/services/packages/terraform_module/create_package_service.rb | 2
-rw-r--r--  app/services/packages/update_package_file_service.rb | 58
-rw-r--r--  app/services/packages/update_tags_service.rb | 2
-rw-r--r--  app/services/post_receive_service.rb | 7
-rw-r--r--  app/services/projects/after_rename_service.rb | 2
-rw-r--r--  app/services/projects/cleanup_service.rb | 2
-rw-r--r--  app/services/projects/create_service.rb | 8
-rw-r--r--  app/services/projects/detect_repository_languages_service.rb | 2
-rw-r--r--  app/services/projects/fetch_statistics_increment_service.rb | 2
-rw-r--r--  app/services/projects/hashed_storage/migrate_repository_service.rb | 2
-rw-r--r--  app/services/projects/hashed_storage/rollback_repository_service.rb | 2
-rw-r--r--  app/services/projects/lfs_pointers/lfs_link_service.rb | 2
-rw-r--r--  app/services/projects/lfs_pointers/lfs_object_download_list_service.rb | 18
-rw-r--r--  app/services/projects/operations/update_service.rb | 9
-rw-r--r--  app/services/projects/overwrite_project_service.rb | 17
-rw-r--r--  app/services/projects/protect_default_branch_service.rb | 2
-rw-r--r--  app/services/projects/transfer_service.rb | 10
-rw-r--r--  app/services/projects/update_pages_service.rb | 120
-rw-r--r--  app/services/projects/update_remote_mirror_service.rb | 7
-rw-r--r--  app/services/releases/base_service.rb | 9
-rw-r--r--  app/services/releases/create_service.rb | 4
-rw-r--r--  app/services/releases/destroy_service.rb | 2
-rw-r--r--  app/services/releases/update_service.rb | 4
-rw-r--r--  app/services/resource_events/change_labels_service.rb | 2
-rw-r--r--  app/services/search/global_service.rb | 3
-rw-r--r--  app/services/security/merge_reports_service.rb | 70
-rw-r--r--  app/services/service_ping/permit_data_categories_service.rb | 20
-rw-r--r--  app/services/service_ping/service_ping_settings.rb | 17
-rw-r--r--  app/services/service_ping/submit_service.rb | 2
-rw-r--r--  app/services/spam/akismet_mark_as_spam_service.rb (renamed from app/services/spam/mark_as_spam_service.rb) | 6
-rw-r--r--  app/services/spam/spam_action_service.rb | 4
-rw-r--r--  app/services/suggestions/create_service.rb | 2
-rw-r--r--  app/services/todos/destroy/destroyed_issuable_service.rb | 2
-rw-r--r--  app/services/todos/destroy/entity_leave_service.rb | 15
-rw-r--r--  app/services/users/ban_service.rb | 22
-rw-r--r--  app/services/users/banned_user_base_service.rb | 37
-rw-r--r--  app/services/users/set_status_service.rb | 12
-rw-r--r--  app/services/users/unban_service.rb | 15
114 files changed, 1249 insertions, 420 deletions
diff --git a/app/services/admin/propagate_service_template.rb b/app/services/admin/propagate_service_template.rb
index 07be3c1027d..c251537c479 100644
--- a/app/services/admin/propagate_service_template.rb
+++ b/app/services/admin/propagate_service_template.rb
@@ -5,9 +5,7 @@ module Admin
include PropagateService
def propagate
- return unless integration.active?
-
- create_integration_for_projects_without_integration
+ # TODO: Remove this as part of https://gitlab.com/gitlab-org/gitlab/-/issues/335178
end
end
end
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index d42dcb2fd00..a2683647c72 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -21,7 +21,7 @@ module Auth
return error('DENIED', status: 403, message: 'access forbidden') unless has_registry_ability?
- unless scopes.any? || current_user || project
+ unless scopes.any? || current_user || deploy_token || project
return error('DENIED', status: 403, message: 'access forbidden')
end
@@ -124,7 +124,6 @@ module Auth
end
def migration_eligible(project, actions)
- return unless actions.include?('push')
return unless Feature.enabled?(:container_registry_migration_phase1)
# The migration process will start by allowing only specific test and gitlab-org projects using the
@@ -178,8 +177,7 @@ module Auth
end
def can_user?(ability, project)
- user = current_user.is_a?(User) ? current_user : nil
- can?(user, ability, project)
+ can?(current_user, ability, project)
end
def build_can_pull?(requested_project)
@@ -202,16 +200,16 @@ module Auth
def deploy_token_can_pull?(requested_project)
has_authentication_ability?(:read_container_image) &&
- current_user.is_a?(DeployToken) &&
- current_user.has_access_to?(requested_project) &&
- current_user.read_registry?
+ deploy_token.present? &&
+ deploy_token.has_access_to?(requested_project) &&
+ deploy_token.read_registry?
end
def deploy_token_can_push?(requested_project)
has_authentication_ability?(:create_container_image) &&
- current_user.is_a?(DeployToken) &&
- current_user.has_access_to?(requested_project) &&
- current_user.write_registry?
+ deploy_token.present? &&
+ deploy_token.has_access_to?(requested_project) &&
+ deploy_token.write_registry?
end
##
@@ -250,6 +248,10 @@ module Auth
{}
end
+ def deploy_token
+ params[:deploy_token]
+ end
+
def log_if_actions_denied(type, requested_project, requested_actions, authorized_actions)
return if requested_actions == authorized_actions
diff --git a/app/services/auth/dependency_proxy_authentication_service.rb b/app/services/auth/dependency_proxy_authentication_service.rb
index fab42e0ebb6..164594d6f6c 100644
--- a/app/services/auth/dependency_proxy_authentication_service.rb
+++ b/app/services/auth/dependency_proxy_authentication_service.rb
@@ -8,10 +8,7 @@ module Auth
def execute(authentication_abilities:)
return error('dependency proxy not enabled', 404) unless ::Gitlab.config.dependency_proxy.enabled
-
- # Because app/controllers/concerns/dependency_proxy/auth.rb consumes this
- # JWT only as `User.find`, we currently only allow User (not DeployToken, etc)
- return error('access forbidden', 403) unless current_user.is_a?(User)
+ return error('access forbidden', 403) unless valid_user_actor?
{ token: authorized_token.encoded }
end
@@ -36,11 +33,24 @@ module Auth
private
+ def valid_user_actor?
+ current_user || valid_deploy_token?
+ end
+
+ def valid_deploy_token?
+ deploy_token && deploy_token.valid_for_dependency_proxy?
+ end
+
def authorized_token
JSONWebToken::HMACToken.new(self.class.secret).tap do |token|
- token['user_id'] = current_user.id
+ token['user_id'] = current_user.id if current_user
+ token['deploy_token'] = deploy_token.token if deploy_token
token.expire_time = self.class.token_expire_at
end
end
+
+ def deploy_token
+ params[:deploy_token]
+ end
end
end
diff --git a/app/services/authorized_project_update/project_recalculate_per_user_service.rb b/app/services/authorized_project_update/project_recalculate_per_user_service.rb
new file mode 100644
index 00000000000..6141bfc6498
--- /dev/null
+++ b/app/services/authorized_project_update/project_recalculate_per_user_service.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module AuthorizedProjectUpdate
+ class ProjectRecalculatePerUserService < ProjectRecalculateService
+ def initialize(project, user)
+ @project = project
+ @user = user
+ end
+
+ private
+
+ attr_reader :user
+
+ def apply_scopes(project_authorizations)
+ super.where(user_id: user.id) # rubocop: disable CodeReuse/ActiveRecord
+ end
+
+ def effective_access_levels
+ Projects::Members::EffectiveAccessLevelPerUserFinder.new(project, user).execute
+ end
+ end
+end
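
A minimal usage sketch for the new per-user recalculation service above; it relies on the `execute` entry point inherited from `ProjectRecalculateService`, and the `project`/`user` lookups here are hypothetical:

    # Recalculate project_authorizations for a single user on a single project.
    project = Project.find_by_full_path('some-group/some-project') # hypothetical lookup
    user    = User.find_by_username('some-user')                   # hypothetical lookup

    AuthorizedProjectUpdate::ProjectRecalculatePerUserService.new(project, user).execute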
diff --git a/app/services/authorized_project_update/project_recalculate_service.rb b/app/services/authorized_project_update/project_recalculate_service.rb
index cbb8efaf99f..d70d0efc2af 100644
--- a/app/services/authorized_project_update/project_recalculate_service.rb
+++ b/app/services/authorized_project_update/project_recalculate_service.rb
@@ -26,7 +26,7 @@ module AuthorizedProjectUpdate
def current_authorizations
strong_memoize(:current_authorizations) do
- project.project_authorizations
+ apply_scopes(project.project_authorizations)
.pluck(:user_id, :access_level) # rubocop: disable CodeReuse/ActiveRecord
end
end
@@ -35,8 +35,7 @@ module AuthorizedProjectUpdate
strong_memoize(:fresh_authorizations) do
result = []
- Projects::Members::EffectiveAccessLevelFinder.new(project)
- .execute
+ effective_access_levels
.each_batch(of: BATCH_SIZE, column: :user_id) do |member_batch|
result += member_batch.pluck(:user_id, 'MAX(access_level)') # rubocop: disable CodeReuse/ActiveRecord
end
@@ -76,5 +75,13 @@ module AuthorizedProjectUpdate
end
end
end
+
+ def apply_scopes(project_authorizations)
+ project_authorizations
+ end
+
+ def effective_access_levels
+ Projects::Members::EffectiveAccessLevelFinder.new(project).execute
+ end
end
end
diff --git a/app/services/auto_merge/base_service.rb b/app/services/auto_merge/base_service.rb
index 142eebca2e3..e756e8c14d8 100644
--- a/app/services/auto_merge/base_service.rb
+++ b/app/services/auto_merge/base_service.rb
@@ -6,7 +6,7 @@ module AutoMerge
include MergeRequests::AssignsMergeParams
def execute(merge_request)
- ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do # rubocop: disable Database/MultipleDatabases
register_auto_merge_parameters!(merge_request)
yield if block_given?
end
@@ -29,7 +29,7 @@ module AutoMerge
end
def cancel(merge_request)
- ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do # rubocop: disable Database/MultipleDatabases
clear_auto_merge_parameters!(merge_request)
yield if block_given?
end
@@ -41,7 +41,7 @@ module AutoMerge
end
def abort(merge_request, reason)
- ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do # rubocop: disable Database/MultipleDatabases
clear_auto_merge_parameters!(merge_request)
yield if block_given?
end
diff --git a/app/services/boards/base_item_move_service.rb b/app/services/boards/base_item_move_service.rb
index dfd0002cbc9..be16c595abb 100644
--- a/app/services/boards/base_item_move_service.rb
+++ b/app/services/boards/base_item_move_service.rb
@@ -4,13 +4,23 @@ module Boards
class BaseItemMoveService < Boards::BaseService
def execute(issuable)
issuable_modification_params = issuable_params(issuable)
- return false if issuable_modification_params.empty?
+ return if issuable_modification_params.empty?
- move_single_issuable(issuable, issuable_modification_params)
+ return unless move_single_issuable(issuable, issuable_modification_params)
+
+ success(issuable)
end
private
+ def success(issuable)
+ ServiceResponse.success(payload: { issuable: issuable })
+ end
+
+ def error(issuable, message)
+ ServiceResponse.error(payload: { issuable: issuable }, message: message)
+ end
+
def issuable_params(issuable)
attrs = {}
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 0e95bf7a434..8806e6788ff 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -42,7 +42,7 @@ module Boards
end
def set_issue_types
- params[:issue_types] = Issue::TYPES_FOR_LIST
+ params[:issue_types] ||= Issue::TYPES_FOR_LIST
end
def item_model
diff --git a/app/services/bulk_update_integration_service.rb b/app/services/bulk_update_integration_service.rb
index fc1580ab880..45465ba3946 100644
--- a/app/services/bulk_update_integration_service.rb
+++ b/app/services/bulk_update_integration_service.rb
@@ -9,10 +9,10 @@ class BulkUpdateIntegrationService
# rubocop: disable CodeReuse/ActiveRecord
def execute
Integration.transaction do
- Integration.where(id: batch.select(:id)).update_all(integration_hash)
+ Integration.where(id: batch_ids).update_all(integration_hash)
if integration.data_fields_present?
- integration.data_fields.class.where(service_id: batch.select(:id)).update_all(data_fields_hash)
+ integration.data_fields.class.where(service_id: batch_ids).update_all(data_fields_hash)
end
end
end
@@ -29,4 +29,13 @@ class BulkUpdateIntegrationService
def data_fields_hash
integration.to_data_fields_hash
end
+
+ def batch_ids
+ @batch_ids ||=
+ if batch.is_a?(ActiveRecord::Relation)
+ batch.select(:id)
+ else
+ batch.map(&:id)
+ end
+ end
end
diff --git a/app/services/ci/after_requeue_job_service.rb b/app/services/ci/after_requeue_job_service.rb
index b422e57baad..f717dd0862c 100644
--- a/app/services/ci/after_requeue_job_service.rb
+++ b/app/services/ci/after_requeue_job_service.rb
@@ -38,7 +38,7 @@ module Ci
end
def stage_dependent_jobs(processable)
- skipped_jobs(processable).scheduling_type_stage.after_stage(processable.stage_idx)
+ skipped_jobs(processable).after_stage(processable.stage_idx)
end
def needs_dependent_jobs(processable)
diff --git a/app/services/ci/append_build_trace_service.rb b/app/services/ci/append_build_trace_service.rb
index 8200f9790ee..0eef0ff0e61 100644
--- a/app/services/ci/append_build_trace_service.rb
+++ b/app/services/ci/append_build_trace_service.rb
@@ -71,8 +71,7 @@ module Ci
end
def trace_size_exceeded?(size)
- Feature.enabled?(:ci_jobs_trace_size_limit, project, default_enabled: :yaml) &&
- project.actual_limits.exceeded?(:ci_jobs_trace_size_limit, size / 1.megabyte)
+ project.actual_limits.exceeded?(:ci_jobs_trace_size_limit, size / 1.megabyte)
end
end
end
diff --git a/app/services/ci/build_cancel_service.rb b/app/services/ci/build_cancel_service.rb
new file mode 100644
index 00000000000..a23418ed738
--- /dev/null
+++ b/app/services/ci/build_cancel_service.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Ci
+ class BuildCancelService
+ def initialize(build, user)
+ @build = build
+ @user = user
+ end
+
+ def execute
+ return forbidden unless allowed?
+ return unprocessable_entity unless build.cancelable?
+
+ build.cancel
+
+ ServiceResponse.success(payload: build)
+ end
+
+ private
+
+ attr_reader :build, :user
+
+ def allowed?
+ user.can?(:update_build, build)
+ end
+
+ def forbidden
+ ServiceResponse.error(message: 'Forbidden', http_status: :forbidden)
+ end
+
+ def unprocessable_entity
+ ServiceResponse.error(message: 'Unprocessable entity', http_status: :unprocessable_entity)
+ end
+ end
+end
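
A minimal sketch of calling the new cancel service above and handling its ServiceResponse; the `build` and `current_user` variables are assumed to be an existing `Ci::Build` and `User`:

    response = Ci::BuildCancelService.new(build, current_user).execute

    if response.success?
      cancelled_build = response.payload   # the build that was just cancelled
    else
      # message is 'Forbidden' or 'Unprocessable entity', with a matching http_status
      Rails.logger.info("Build cancel failed: #{response.message}")
    end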
diff --git a/app/services/ci/build_unschedule_service.rb b/app/services/ci/build_unschedule_service.rb
new file mode 100644
index 00000000000..3b367ecc4c4
--- /dev/null
+++ b/app/services/ci/build_unschedule_service.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Ci
+ class BuildUnscheduleService
+ def initialize(build, user)
+ @build = build
+ @user = user
+ end
+
+ def execute
+ return forbidden unless allowed?
+ return unprocessable_entity unless build.scheduled?
+
+ build.unschedule!
+
+ ServiceResponse.success(payload: build)
+ end
+
+ private
+
+ attr_reader :build, :user
+
+ def allowed?
+ user.can?(:update_build, build)
+ end
+
+ def forbidden
+ ServiceResponse.error(message: 'Forbidden', http_status: :forbidden)
+ end
+
+ def unprocessable_entity
+ ServiceResponse.error(message: 'Unprocessable entity', http_status: :unprocessable_entity)
+ end
+ end
+end
diff --git a/app/services/ci/create_downstream_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index e9ec2338171..a65c22e273c 100644
--- a/app/services/ci/create_downstream_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -33,8 +33,9 @@ module Ci
current_user,
pipeline_params.fetch(:target_revision))
- downstream_pipeline = service.execute(
- pipeline_params.fetch(:source), **pipeline_params[:execute_params])
+ downstream_pipeline = service
+ .execute(pipeline_params.fetch(:source), **pipeline_params[:execute_params])
+ .payload
downstream_pipeline.tap do |pipeline|
update_bridge_status!(@bridge, pipeline)
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index c039f31aafc..ba9665555cc 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -11,11 +11,11 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
Gitlab::Ci::Pipeline::Chain::Validate::Repository,
Gitlab::Ci::Pipeline::Chain::Validate::SecurityOrchestrationPolicy,
+ Gitlab::Ci::Pipeline::Chain::Skip,
Gitlab::Ci::Pipeline::Chain::Config::Content,
Gitlab::Ci::Pipeline::Chain::Config::Process,
Gitlab::Ci::Pipeline::Chain::Validate::AfterConfig,
Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
- Gitlab::Ci::Pipeline::Chain::Skip,
Gitlab::Ci::Pipeline::Chain::SeedBlock,
Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules,
Gitlab::Ci::Pipeline::Chain::Seed,
@@ -87,12 +87,16 @@ module Ci
if pipeline.persisted?
schedule_head_pipeline_update
create_namespace_onboarding_action
+ else
+ # If pipeline is not persisted, try to recover IID
+ pipeline.reset_project_iid
end
- # If pipeline is not persisted, try to recover IID
- pipeline.reset_project_iid unless pipeline.persisted?
-
- pipeline
+ if error_message = pipeline.full_error_messages.presence || pipeline.failure_reason.presence
+ ServiceResponse.error(message: error_message, payload: pipeline)
+ else
+ ServiceResponse.success(payload: pipeline)
+ end
end
# rubocop: enable Metrics/ParameterLists
@@ -100,8 +104,8 @@ module Ci
source = args[0]
params = Hash(args[1])
- execute(source, **params, &block).tap do |pipeline|
- unless pipeline.persisted?
+ execute(source, **params, &block).tap do |response|
+ unless response.payload.persisted?
raise CreateError, pipeline.full_error_messages
end
end
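
With this change `Ci::CreatePipelineService#execute` returns a ServiceResponse instead of the pipeline itself, so callers unwrap the record via `payload` (as the trigger service further down in this diff does). A minimal sketch, assuming a `project`, a `user`, and an existing `main` branch:

    response = Ci::CreatePipelineService
      .new(project, user, ref: 'main')
      .execute(:push)

    pipeline = response.payload
    Rails.logger.warn(response.message) if response.error? && !pipeline.persisted?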
diff --git a/app/services/ci/daily_build_group_report_result_service.rb b/app/services/ci/daily_build_group_report_result_service.rb
index 820e6e08fc5..25c6d57d961 100644
--- a/app/services/ci/daily_build_group_report_result_service.rb
+++ b/app/services/ci/daily_build_group_report_result_service.rb
@@ -3,7 +3,13 @@
module Ci
class DailyBuildGroupReportResultService
def execute(pipeline)
- DailyBuildGroupReportResult.upsert_reports(coverage_reports(pipeline))
+ if DailyBuildGroupReportResult.upsert_reports(coverage_reports(pipeline))
+ Projects::CiFeatureUsage.insert_usage(
+ project_id: pipeline.project_id,
+ feature: :code_coverage,
+ default_branch: pipeline.default_branch?
+ )
+ end
end
private
diff --git a/app/services/ci/delete_unit_tests_service.rb b/app/services/ci/delete_unit_tests_service.rb
index 28f96351175..230661a107d 100644
--- a/app/services/ci/delete_unit_tests_service.rb
+++ b/app/services/ci/delete_unit_tests_service.rb
@@ -23,7 +23,7 @@ module Ci
def delete_batch!(klass)
deleted = 0
- ActiveRecord::Base.transaction do
+ klass.transaction do
ids = klass.deletable.lock('FOR UPDATE SKIP LOCKED').limit(BATCH_SIZE).pluck(:id)
break if ids.empty?
diff --git a/app/services/ci/destroy_pipeline_service.rb b/app/services/ci/destroy_pipeline_service.rb
index 494fcb23a06..dd5c8e0379f 100644
--- a/app/services/ci/destroy_pipeline_service.rb
+++ b/app/services/ci/destroy_pipeline_service.rb
@@ -7,7 +7,7 @@ module Ci
Ci::ExpirePipelineCacheService.new.execute(pipeline, delete: true)
- pipeline.cancel_running if pipeline.cancelable? && ::Feature.enabled?(:cancel_pipelines_prior_to_destroy, default_enabled: :yaml)
+ pipeline.cancel_running if pipeline.cancelable?
pipeline.reset.destroy!
diff --git a/app/services/ci/drop_pipeline_service.rb b/app/services/ci/drop_pipeline_service.rb
index f510943251b..16d3abcbfa0 100644
--- a/app/services/ci/drop_pipeline_service.rb
+++ b/app/services/ci/drop_pipeline_service.rb
@@ -2,6 +2,9 @@
module Ci
class DropPipelineService
+ PRELOADED_COMMIT_STATUS_RELATIONS = [:project, :pipeline].freeze
+ PRELOADED_CI_BUILD_RELATIONS = [:metadata, :deployment, :taggings].freeze
+
# execute service asynchronously for each cancelable pipeline
def execute_async_for_all(pipelines, failure_reason, context_user)
pipelines.cancelable.select(:id).find_in_batches do |pipelines_batch|
@@ -27,11 +30,11 @@ module Ci
private
- def preload_associations_for_drop(builds_batch)
- ActiveRecord::Associations::Preloader.new.preload( # rubocop: disable CodeReuse/ActiveRecord
- builds_batch,
- [:project, :pipeline, :metadata, :deployment, :taggings]
- )
+ # rubocop: disable CodeReuse/ActiveRecord
+ def preload_associations_for_drop(commit_status_batch)
+ ActiveRecord::Associations::Preloader.new.preload(commit_status_batch, PRELOADED_COMMIT_STATUS_RELATIONS)
+ ActiveRecord::Associations::Preloader.new.preload(commit_status_batch.select { |job| job.is_a?(Ci::Build) }, PRELOADED_CI_BUILD_RELATIONS)
end
+ # rubocop: enable CodeReuse/ActiveRecord
end
end
diff --git a/app/services/ci/external_pull_requests/create_pipeline_service.rb b/app/services/ci/external_pull_requests/create_pipeline_service.rb
index 78be94bfb41..83499524a8e 100644
--- a/app/services/ci/external_pull_requests/create_pipeline_service.rb
+++ b/app/services/ci/external_pull_requests/create_pipeline_service.rb
@@ -7,7 +7,8 @@ module Ci
module ExternalPullRequests
class CreatePipelineService < BaseService
def execute(pull_request)
- return unless pull_request.open? && pull_request.actual_branch_head?
+ return pull_request_not_open_error unless pull_request.open?
+ return pull_request_branch_error unless pull_request.actual_branch_head?
create_pipeline_for(pull_request)
end
@@ -26,6 +27,14 @@ module Ci
target_sha: pull_request.target_sha
}
end
+
+ def pull_request_not_open_error
+ ServiceResponse.error(message: 'The pull request is not opened', payload: nil)
+ end
+
+ def pull_request_branch_error
+ ServiceResponse.error(message: 'The source sha is not the head of the source branch', payload: nil)
+ end
end
end
end
diff --git a/app/services/ci/extract_sections_from_build_trace_service.rb b/app/services/ci/extract_sections_from_build_trace_service.rb
deleted file mode 100644
index c756e376901..00000000000
--- a/app/services/ci/extract_sections_from_build_trace_service.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class ExtractSectionsFromBuildTraceService < BaseService
- def execute(build)
- return false unless build.trace_sections.empty?
-
- Gitlab::Database.bulk_insert(BuildTraceSection.table_name, extract_sections(build)) # rubocop:disable Gitlab/BulkInsert
- true
- end
-
- private
-
- # rubocop: disable CodeReuse/ActiveRecord
- def find_or_create_name(name)
- project.build_trace_section_names.find_or_create_by!(name: name)
- rescue ActiveRecord::RecordInvalid
- project.build_trace_section_names.find_by!(name: name)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def extract_sections(build)
- build.trace.extract_sections.map do |attr|
- name = attr.delete(:name)
- name_record = find_or_create_name(name)
-
- attr.merge(
- build_id: build.id,
- project_id: project.id,
- section_name_id: name_record.id)
- end
- end
- end
-end
diff --git a/app/services/ci/pipeline_trigger_service.rb b/app/services/ci/pipeline_trigger_service.rb
index 62c4d6b4599..7746382b845 100644
--- a/app/services/ci/pipeline_trigger_service.rb
+++ b/app/services/ci/pipeline_trigger_service.rb
@@ -27,13 +27,13 @@ module Ci
# this check is to not leak the presence of the project if user cannot read it
return unless trigger.project == project
- pipeline = Ci::CreatePipelineService
+ response = Ci::CreatePipelineService
.new(project, trigger.owner, ref: params[:ref], variables_attributes: variables)
.execute(:trigger, ignore_skip_ci: true) do |pipeline|
pipeline.trigger_requests.build(trigger: trigger)
end
- pipeline_service_response(pipeline)
+ pipeline_service_response(response.payload)
end
def pipeline_service_response(pipeline)
@@ -57,7 +57,7 @@ module Ci
# this check is to not leak the presence of the project if user cannot read it
return unless can?(job.user, :read_project, project)
- pipeline = Ci::CreatePipelineService
+ response = Ci::CreatePipelineService
.new(project, job.user, ref: params[:ref], variables_attributes: variables)
.execute(:pipeline, ignore_skip_ci: true) do |pipeline|
source = job.sourced_pipelines.build(
@@ -69,7 +69,7 @@ module Ci
pipeline.source_pipeline = source
end
- pipeline_service_response(pipeline)
+ pipeline_service_response(response.payload)
end
def job_from_token
diff --git a/app/services/ci/pipelines/add_job_service.rb b/app/services/ci/pipelines/add_job_service.rb
index 03bdb491200..41f9903e48c 100644
--- a/app/services/ci/pipelines/add_job_service.rb
+++ b/app/services/ci/pipelines/add_job_service.rb
@@ -3,21 +3,33 @@
module Ci
module Pipelines
class AddJobService
+ include ::Gitlab::ExclusiveLeaseHelpers
+
attr_reader :pipeline
def initialize(pipeline)
@pipeline = pipeline
- raise ArgumentError, "Pipeline must be persisted for this service to be used" unless @pipeline.persisted?
+ raise ArgumentError, "Pipeline must be persisted for this service to be used" unless pipeline.persisted?
end
def execute!(job, &block)
assign_pipeline_attributes(job)
- Ci::Pipeline.transaction do
- yield(job)
+ if Feature.enabled?(:ci_pipeline_add_job_with_lock, pipeline.project, default_enabled: :yaml)
+ in_lock("ci:pipelines:#{pipeline.id}:add-job", ttl: LOCK_TIMEOUT, sleep_sec: LOCK_SLEEP, retries: LOCK_RETRIES) do
+ Ci::Pipeline.transaction do
+ yield(job)
+
+ job.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, pipeline.project, default_enabled: :yaml)
+ end
+ end
+ else
+ Ci::Pipeline.transaction do
+ yield(job)
- job.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, @pipeline.project, default_enabled: :yaml)
+ job.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, pipeline.project, default_enabled: :yaml)
+ end
end
ServiceResponse.success(payload: { job: job })
@@ -27,10 +39,14 @@ module Ci
private
+ LOCK_TIMEOUT = 1.minute
+ LOCK_SLEEP = 0.5.seconds
+ LOCK_RETRIES = 20
+
def assign_pipeline_attributes(job)
- job.pipeline = @pipeline
- job.project = @pipeline.project
- job.ref = @pipeline.ref
+ job.pipeline = pipeline
+ job.project = pipeline.project
+ job.ref = pipeline.ref
end
end
end
diff --git a/app/services/ci/queue/builds_table_strategy.rb b/app/services/ci/queue/builds_table_strategy.rb
index c941734ac40..d0a343cb9d4 100644
--- a/app/services/ci/queue/builds_table_strategy.rb
+++ b/app/services/ci/queue/builds_table_strategy.rb
@@ -53,6 +53,14 @@ module Ci
relation.pluck(:id)
end
+ def use_denormalized_shared_runners_data?
+ false
+ end
+
+ def use_denormalized_minutes_data?
+ false
+ end
+
private
def running_builds_for_shared_runners
diff --git a/app/services/ci/queue/pending_builds_strategy.rb b/app/services/ci/queue/pending_builds_strategy.rb
index 55d5cb96a0a..efe3a981d3a 100644
--- a/app/services/ci/queue/pending_builds_strategy.rb
+++ b/app/services/ci/queue/pending_builds_strategy.rb
@@ -11,25 +11,9 @@ module Ci
# rubocop:disable CodeReuse/ActiveRecord
def builds_for_shared_runner
- relation = new_builds
- # don't run projects which have not enabled shared runners and builds
- .joins('INNER JOIN projects ON ci_pending_builds.project_id = projects.id')
- .where(projects: { shared_runners_enabled: true, pending_delete: false })
- .joins('LEFT JOIN project_features ON ci_pending_builds.project_id = project_features.project_id')
- .where('project_features.builds_access_level IS NULL or project_features.builds_access_level > 0')
+ shared_builds = builds_available_for_shared_runners
- if Feature.enabled?(:ci_queueing_disaster_recovery_disable_fair_scheduling, runner, type: :ops, default_enabled: :yaml)
- # if disaster recovery is enabled, we fallback to FIFO scheduling
- relation.order('ci_pending_builds.build_id ASC')
- else
- # Implement fair scheduling
- # this returns builds that are ordered by number of running builds
- # we prefer projects that don't use shared runners at all
- relation
- .with(running_builds_for_shared_runners_cte.to_arel)
- .joins("LEFT JOIN project_builds ON ci_pending_builds.project_id = project_builds.project_id")
- .order(Arel.sql('COALESCE(project_builds.running_builds, 0) ASC'), 'ci_pending_builds.build_id ASC')
- end
+ builds_ordered_for_shared_runners(shared_builds)
end
def builds_matching_tag_ids(relation, ids)
@@ -52,8 +36,44 @@ module Ci
relation.pluck(:build_id)
end
+ def use_denormalized_shared_runners_data?
+ ::Feature.enabled?(:ci_queueing_denormalize_shared_runners_information, runner, type: :development, default_enabled: :yaml)
+ end
+
+ def use_denormalized_minutes_data?
+ ::Feature.enabled?(:ci_queueing_denormalize_ci_minutes_information, runner, type: :development, default_enabled: :yaml)
+ end
+
private
+ def builds_available_for_shared_runners
+ if use_denormalized_shared_runners_data?
+ new_builds.with_instance_runners
+ else
+ new_builds
+ # don't run projects which have not enabled shared runners and builds
+ .joins('INNER JOIN projects ON ci_pending_builds.project_id = projects.id')
+ .where(projects: { shared_runners_enabled: true, pending_delete: false })
+ .joins('LEFT JOIN project_features ON ci_pending_builds.project_id = project_features.project_id')
+ .where('project_features.builds_access_level IS NULL or project_features.builds_access_level > 0')
+ end
+ end
+
+ def builds_ordered_for_shared_runners(relation)
+ if Feature.enabled?(:ci_queueing_disaster_recovery_disable_fair_scheduling, runner, type: :ops, default_enabled: :yaml)
+ # if disaster recovery is enabled, we fallback to FIFO scheduling
+ relation.order('ci_pending_builds.build_id ASC')
+ else
+ # Implement fair scheduling
+ # this returns builds that are ordered by number of running builds
+ # we prefer projects that don't use shared runners at all
+ relation
+ .with(running_builds_for_shared_runners_cte.to_arel)
+ .joins("LEFT JOIN project_builds ON ci_pending_builds.project_id = project_builds.project_id")
+ .order(Arel.sql('COALESCE(project_builds.running_builds, 0) ASC'), 'ci_pending_builds.build_id ASC')
+ end
+ end
+
def running_builds_for_shared_runners_cte
running_builds = ::Ci::RunningBuild
.instance_type
@@ -67,3 +87,5 @@ module Ci
end
end
end
+
+Ci::Queue::PendingBuildsStrategy.prepend_mod_with('Ci::Queue::PendingBuildsStrategy')
diff --git a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
index 9e3e6de3928..1d329fe7b53 100644
--- a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
+++ b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
@@ -5,6 +5,8 @@ module Ci
class AssignResourceFromResourceGroupService < ::BaseService
# rubocop: disable CodeReuse/ActiveRecord
def execute(resource_group)
+ release_resource_from_stale_jobs(resource_group)
+
free_resources = resource_group.resources.free.count
resource_group.processables.waiting_for_resource.take(free_resources).each do |processable|
@@ -12,6 +14,14 @@ module Ci
end
end
# rubocop: enable CodeReuse/ActiveRecord
+
+ private
+
+ def release_resource_from_stale_jobs(resource_group)
+ resource_group.resources.stale_processables.find_each do |processable|
+ resource_group.release_resource_from(processable)
+ end
+ end
end
end
end
diff --git a/app/services/ci/unlock_artifacts_service.rb b/app/services/ci/unlock_artifacts_service.rb
index 07faf90dd6d..7c169cb8395 100644
--- a/app/services/ci/unlock_artifacts_service.rb
+++ b/app/services/ci/unlock_artifacts_service.rb
@@ -17,7 +17,7 @@ module Ci
SQL
loop do
- break if ActiveRecord::Base.connection.exec_query(query).empty?
+ break if Ci::Pipeline.connection.exec_query(query).empty?
end
end
diff --git a/app/services/dependency_proxy/find_or_create_blob_service.rb b/app/services/dependency_proxy/find_or_create_blob_service.rb
index bd06f9d7628..f3dbf31dcdb 100644
--- a/app/services/dependency_proxy/find_or_create_blob_service.rb
+++ b/app/services/dependency_proxy/find_or_create_blob_service.rb
@@ -10,10 +10,12 @@ module DependencyProxy
end
def execute
+ from_cache = true
file_name = @blob_sha.sub('sha256:', '') + '.gz'
blob = @group.dependency_proxy_blobs.find_or_build(file_name)
unless blob.persisted?
+ from_cache = false
result = DependencyProxy::DownloadBlobService
.new(@image, @blob_sha, @token).execute
@@ -28,7 +30,7 @@ module DependencyProxy
blob.save!
end
- success(blob: blob)
+ success(blob: blob, from_cache: from_cache)
end
private
diff --git a/app/services/dependency_proxy/find_or_create_manifest_service.rb b/app/services/dependency_proxy/find_or_create_manifest_service.rb
index ee608d715aa..0eb990ab7f8 100644
--- a/app/services/dependency_proxy/find_or_create_manifest_service.rb
+++ b/app/services/dependency_proxy/find_or_create_manifest_service.rb
@@ -17,10 +17,10 @@ module DependencyProxy
head_result = DependencyProxy::HeadManifestService.new(@image, @tag, @token).execute
- return success(manifest: @manifest) if cached_manifest_matches?(head_result)
+ return success(manifest: @manifest, from_cache: true) if cached_manifest_matches?(head_result)
pull_new_manifest
- respond
+ respond(from_cache: false)
rescue Timeout::Error, *Gitlab::HTTP::HTTP_ERRORS
respond
end
@@ -44,9 +44,9 @@ module DependencyProxy
@manifest && @manifest.digest == head_result[:digest] && @manifest.content_type == head_result[:content_type]
end
- def respond
+ def respond(from_cache: true)
if @manifest.persisted?
- success(manifest: @manifest)
+ success(manifest: @manifest, from_cache: from_cache)
else
error('Failed to download the manifest from the external registry', 503)
end
diff --git a/app/services/dependency_proxy/pull_manifest_service.rb b/app/services/dependency_proxy/pull_manifest_service.rb
index 737414c396e..31494773cc0 100644
--- a/app/services/dependency_proxy/pull_manifest_service.rb
+++ b/app/services/dependency_proxy/pull_manifest_service.rb
@@ -17,7 +17,7 @@ module DependencyProxy
file = Tempfile.new
begin
- file.write(response)
+ file.write(response.body)
file.flush
yield(success(file: file, digest: response.headers['docker-content-digest'], content_type: response.headers['content-type']))
diff --git a/app/services/deployments/update_environment_service.rb b/app/services/deployments/update_environment_service.rb
index 6f85779c285..83c37257297 100644
--- a/app/services/deployments/update_environment_service.rb
+++ b/app/services/deployments/update_environment_service.rb
@@ -26,7 +26,7 @@ module Deployments
end
def update_environment(deployment)
- ActiveRecord::Base.transaction do
+ ActiveRecord::Base.transaction do # rubocop: disable Database/MultipleDatabases
# Renew attributes at update
renew_external_url
renew_auto_stop_in
diff --git a/app/services/design_management/copy_design_collection/copy_service.rb b/app/services/design_management/copy_design_collection/copy_service.rb
index b40f6a81174..c43696442d2 100644
--- a/app/services/design_management/copy_design_collection/copy_service.rb
+++ b/app/services/design_management/copy_design_collection/copy_service.rb
@@ -36,7 +36,7 @@ module DesignManagement
with_temporary_branch do
copy_commits!
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
design_ids = copy_designs!
version_ids = copy_versions!
copy_actions!(design_ids, version_ids)
@@ -181,12 +181,12 @@ module DesignManagement
)
end
- # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
+ # TODO Replace `Gitlab::Database.main.bulk_insert` with `BulkInsertSafe`
# once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
# When this is fixed, we can remove the call to
# `with_project_iid_supply` above, since the objects will be instantiated
# and callbacks (including `ensure_project_iid!`) will fire.
- ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ ::Gitlab::Database.main.bulk_insert( # rubocop:disable Gitlab/BulkInsert
DesignManagement::Design.table_name,
new_rows,
return_ids: true
@@ -207,9 +207,9 @@ module DesignManagement
)
end
- # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
+ # TODO Replace `Gitlab::Database.main.bulk_insert` with `BulkInsertSafe`
# once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
- ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ ::Gitlab::Database.main.bulk_insert( # rubocop:disable Gitlab/BulkInsert
DesignManagement::Version.table_name,
new_rows,
return_ids: true
@@ -239,7 +239,7 @@ module DesignManagement
end
# We cannot use `BulkInsertSafe` because of the uploader mounted in `Action`.
- ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ ::Gitlab::Database.main.bulk_insert( # rubocop:disable Gitlab/BulkInsert
DesignManagement::Action.table_name,
new_rows
)
@@ -278,7 +278,7 @@ module DesignManagement
# We cannot use `BulkInsertSafe` due to the LfsObjectsProject#update_project_statistics
# callback that fires after_commit.
- ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ ::Gitlab::Database.main.bulk_insert( # rubocop:disable Gitlab/BulkInsert
LfsObjectsProject.table_name,
new_rows,
on_conflict: :do_nothing # Upsert
diff --git a/app/services/draft_notes/publish_service.rb b/app/services/draft_notes/publish_service.rb
index d73c3417a8b..3a1db16aaf4 100644
--- a/app/services/draft_notes/publish_service.rb
+++ b/app/services/draft_notes/publish_service.rb
@@ -32,26 +32,28 @@ module DraftNotes
review = Review.create!(author: current_user, merge_request: merge_request, project: project)
- draft_notes.map do |draft_note|
+ created_notes = draft_notes.map do |draft_note|
draft_note.review = review
- create_note_from_draft(draft_note)
+ create_note_from_draft(draft_note, skip_capture_diff_note_position: true)
end
- draft_notes.delete_all
+ capture_diff_note_positions(created_notes)
+ draft_notes.delete_all
set_reviewed
-
notification_service.async.new_review(review)
MergeRequests::ResolvedDiscussionNotificationService.new(project: project, current_user: current_user).execute(merge_request)
end
- def create_note_from_draft(draft)
+ def create_note_from_draft(draft, skip_capture_diff_note_position: false)
# Make sure the diff file is unfolded in order to find the correct line
# codes.
draft.diff_file&.unfold_diff_lines(draft.original_position)
- note = Notes::CreateService.new(draft.project, draft.author, draft.publish_params).execute
- set_discussion_resolve_status(note, draft)
+ note = Notes::CreateService.new(draft.project, draft.author, draft.publish_params).execute(
+ skip_capture_diff_note_position: skip_capture_diff_note_position
+ )
+ set_discussion_resolve_status(note, draft)
note
end
@@ -70,5 +72,19 @@ module DraftNotes
def set_reviewed
::MergeRequests::MarkReviewerReviewedService.new(project: project, current_user: current_user).execute(merge_request)
end
+
+ def capture_diff_note_positions(notes)
+ paths = notes.flat_map do |note|
+ note.diff_file&.paths if note.diff_note?
+ end
+
+ return if paths.empty?
+
+ capture_service = Discussions::CaptureDiffNotePositionService.new(merge_request, paths.compact)
+
+ notes.each do |note|
+ capture_service.execute(note.discussion) if note.diff_note? && note.start_of_discussion?
+ end
+ end
end
end
diff --git a/app/services/environments/auto_stop_service.rb b/app/services/environments/auto_stop_service.rb
index bde598abf66..4e3aec64283 100644
--- a/app/services/environments/auto_stop_service.rb
+++ b/app/services/environments/auto_stop_service.rb
@@ -32,7 +32,7 @@ module Environments
return false unless environments.exists?
- Ci::StopEnvironmentsService.execute_in_batch(environments)
+ Environments::StopService.execute_in_batch(environments)
end
end
end
diff --git a/app/services/ci/stop_environments_service.rb b/app/services/environments/stop_service.rb
index 7c9fc44e7f4..089aea11296 100644
--- a/app/services/ci/stop_environments_service.rb
+++ b/app/services/environments/stop_service.rb
@@ -1,19 +1,25 @@
# frozen_string_literal: true
-module Ci
- class StopEnvironmentsService < BaseService
+module Environments
+ class StopService < BaseService
attr_reader :ref
- def execute(branch_name)
+ def execute(environment)
+ return unless can?(current_user, :stop_environment, environment)
+
+ environment.stop_with_action!(current_user)
+ end
+
+ def execute_for_branch(branch_name)
@ref = branch_name
return unless @ref.present?
- environments.each { |environment| stop(environment) }
+ environments.each { |environment| execute(environment) }
end
def execute_for_merge_request(merge_request)
- merge_request.environments.each { |environment| stop(environment) }
+ merge_request.environments.each { |environment| execute(environment) }
end
##
@@ -39,12 +45,5 @@ module Ci
.new(project, current_user, ref: @ref, recently_updated: true)
.execute
end
-
- def stop(environment)
- return unless environment.stop_action_available?
- return unless can?(current_user, :stop_environment, environment)
-
- environment.stop_with_action!(current_user)
- end
end
end
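
A minimal sketch of the renamed service's entry points, matching how they are invoked elsewhere in this changeset; `project`, `current_user`, `environment`, `branch_name`, and `merge_request` are assumed to exist:

    service = Environments::StopService.new(project, current_user)

    service.execute(environment)                      # stop one environment, if permitted
    service.execute_for_branch(branch_name)           # stop environments tied to a branch
    service.execute_for_merge_request(merge_request)  # stop environments for a merge request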
diff --git a/app/services/error_tracking/issue_details_service.rb b/app/services/error_tracking/issue_details_service.rb
index 0068a9e9b6d..1614c597a8e 100644
--- a/app/services/error_tracking/issue_details_service.rb
+++ b/app/services/error_tracking/issue_details_service.rb
@@ -8,7 +8,7 @@ module ErrorTracking
private
def perform
- response = project_error_tracking_setting.issue_details(issue_id: params[:issue_id])
+ response = find_issue_details(params[:issue_id])
compose_response(response) do
# The gitlab_issue attribute can contain an absolute GitLab url from the Sentry Client
@@ -36,5 +36,29 @@ module ErrorTracking
def parse_response(response)
{ issue: response[:issue] }
end
+
+ def find_issue_details(issue_id)
+ # There are 2 types of the data source for the error tracking feature:
+ #
+ # * When integrated error tracking is enabled, we use the application database
+ # to read and save error tracking data.
+ #
+ # * When integrated error tracking is disabled we call
+ # project_error_tracking_setting method which works with Sentry API.
+ #
+ # Issue https://gitlab.com/gitlab-org/gitlab/-/issues/329596
+ #
+ if project_error_tracking_setting.integrated_client?
+ error = project.error_tracking_errors.find(issue_id)
+
+ # We use the same response format as project_error_tracking_setting
+ # method below for compatibility with existing code.
+ {
+ issue: error.to_sentry_detailed_error
+ }
+ else
+ project_error_tracking_setting.issue_details(issue_id: issue_id)
+ end
+ end
end
end
diff --git a/app/services/error_tracking/issue_latest_event_service.rb b/app/services/error_tracking/issue_latest_event_service.rb
index a39f1cde1b2..1bf86c658fc 100644
--- a/app/services/error_tracking/issue_latest_event_service.rb
+++ b/app/services/error_tracking/issue_latest_event_service.rb
@@ -5,7 +5,7 @@ module ErrorTracking
private
def perform
- response = project_error_tracking_setting.issue_latest_event(issue_id: params[:issue_id])
+ response = find_issue_latest_event(params[:issue_id])
compose_response(response)
end
@@ -13,5 +13,30 @@ module ErrorTracking
def parse_response(response)
{ latest_event: response[:latest_event] }
end
+
+ def find_issue_latest_event(issue_id)
+ # There are 2 types of the data source for the error tracking feature:
+ #
+ # * When integrated error tracking is enabled, we use the application database
+ # to read and save error tracking data.
+ #
+ # * When integrated error tracking is disabled we call
+ # project_error_tracking_setting method which works with Sentry API.
+ #
+ # Issue https://gitlab.com/gitlab-org/gitlab/-/issues/329596
+ #
+ if project_error_tracking_setting.integrated_client?
+ error = project.error_tracking_errors.find(issue_id)
+ event = error.events.last
+
+ # We use the same response format as project_error_tracking_setting
+ # method below for compatibility with existing code.
+ {
+ latest_event: event.to_sentry_error_event
+ }
+ else
+ project_error_tracking_setting.issue_latest_event(issue_id: issue_id)
+ end
+ end
end
end
diff --git a/app/services/error_tracking/issue_update_service.rb b/app/services/error_tracking/issue_update_service.rb
index 2f8bbfddef0..624e5f94dde 100644
--- a/app/services/error_tracking/issue_update_service.rb
+++ b/app/services/error_tracking/issue_update_service.rb
@@ -5,10 +5,12 @@ module ErrorTracking
private
def perform
- response = project_error_tracking_setting.update_issue(
+ update_opts = {
issue_id: params[:issue_id],
params: update_params
- )
+ }
+
+ response = update_issue(update_opts)
compose_response(response) do
project_error_tracking_setting.expire_issues_cache
@@ -69,5 +71,31 @@ module ErrorTracking
return error('Error Tracking is not enabled') unless enabled?
return error('Access denied', :unauthorized) unless can_update?
end
+
+ def update_issue(opts)
+ # There are 2 types of the data source for the error tracking feature:
+ #
+ # * When integrated error tracking is enabled, we use the application database
+ # to read and save error tracking data.
+ #
+ # * When integrated error tracking is disabled we call
+ # project_error_tracking_setting method which works with Sentry API.
+ #
+ # Issue https://gitlab.com/gitlab-org/gitlab/-/issues/329596
+ #
+ if project_error_tracking_setting.integrated_client?
+ error = project.error_tracking_errors.find(opts[:issue_id])
+ error.status = opts[:params][:status]
+ error.save!
+
+ # We use the same response format as project_error_tracking_setting
+ # method below for compatibility with existing code.
+ {
+ updated: true
+ }
+ else
+ project_error_tracking_setting.update_issue(**opts)
+ end
+ end
end
end
diff --git a/app/services/error_tracking/list_issues_service.rb b/app/services/error_tracking/list_issues_service.rb
index 7087e3825d6..5ddba748fd4 100644
--- a/app/services/error_tracking/list_issues_service.rb
+++ b/app/services/error_tracking/list_issues_service.rb
@@ -22,13 +22,15 @@ module ErrorTracking
def perform
return invalid_status_error unless valid_status?
- response = project_error_tracking_setting.list_sentry_issues(
+ sentry_opts = {
issue_status: issue_status,
limit: limit,
search_term: params[:search_term].presence,
sort: sort,
cursor: params[:cursor].presence
- )
+ }
+
+ response = list_issues(sentry_opts)
compose_response(response)
end
@@ -56,5 +58,36 @@ module ErrorTracking
def sort
params[:sort] || DEFAULT_SORT
end
+
+ def list_issues(opts)
+ # There are 2 types of the data source for the error tracking feature:
+ #
+ # * When integrated error tracking is enabled, we use the application database
+ # to read and save error tracking data.
+ #
+ # * When integrated error tracking is disabled we call
+ # project_error_tracking_setting method which works with Sentry API.
+ #
+ # Issue https://gitlab.com/gitlab-org/gitlab/-/issues/329596
+ #
+ if project_error_tracking_setting.integrated_client?
+ # We are going to support more options in the future.
+ # For now we implement the bare minimum for rendering the list in UI.
+ filter_opts = {
+ status: opts[:issue_status]
+ }
+
+ errors = ErrorTracking::ErrorsFinder.new(current_user, project, filter_opts).execute
+
+ # We use the same response format as project_error_tracking_setting
+ # method below for compatibility with existing code.
+ {
+ issues: errors.map(&:to_sentry_error),
+ pagination: {}
+ }
+ else
+ project_error_tracking_setting.list_sentry_issues(**opts)
+ end
+ end
end
end
diff --git a/app/services/feature_flags/create_service.rb b/app/services/feature_flags/create_service.rb
index 5c87af561d5..5111f914447 100644
--- a/app/services/feature_flags/create_service.rb
+++ b/app/services/feature_flags/create_service.rb
@@ -6,7 +6,7 @@ module FeatureFlags
return error('Access Denied', 403) unless can_create?
return error('Version is invalid', :bad_request) unless valid_version?
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
feature_flag = project.operations_feature_flags.new(params)
if feature_flag.save
diff --git a/app/services/feature_flags/destroy_service.rb b/app/services/feature_flags/destroy_service.rb
index b131a349fc7..986fe004db6 100644
--- a/app/services/feature_flags/destroy_service.rb
+++ b/app/services/feature_flags/destroy_service.rb
@@ -11,7 +11,7 @@ module FeatureFlags
def destroy_feature_flag(feature_flag)
return error('Access Denied', 403) unless can_destroy?(feature_flag)
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
if feature_flag.destroy
save_audit_event(audit_event(feature_flag))
diff --git a/app/services/feature_flags/update_service.rb b/app/services/feature_flags/update_service.rb
index f5ab6f4005b..01e4f661d75 100644
--- a/app/services/feature_flags/update_service.rb
+++ b/app/services/feature_flags/update_service.rb
@@ -12,7 +12,7 @@ module FeatureFlags
return error('Access Denied', 403) unless can_update?(feature_flag)
return error('Not Found', 404) unless valid_user_list_ids?(feature_flag, user_list_ids(params))
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
feature_flag.assign_attributes(params)
feature_flag.strategies.each do |strategy|
diff --git a/app/services/git/base_hooks_service.rb b/app/services/git/base_hooks_service.rb
index 1eb54e13522..aee2f685e97 100644
--- a/app/services/git/base_hooks_service.rb
+++ b/app/services/git/base_hooks_service.rb
@@ -25,6 +25,7 @@ module Git
raise NotImplementedError, "Please implement #{self.class}##{__method__}"
end
+ # The changeset, ordered with the newest commit last
def commits
raise NotImplementedError, "Please implement #{self.class}##{__method__}"
end
@@ -132,10 +133,10 @@ module Git
end
def event_push_data
- # We only need the last commit for the event push, and we don't
+ # We only need the newest commit for the event push, and we don't
# need the full deltas either.
@event_push_data ||= Gitlab::DataBuilder::Push.build(
- **push_data_params(commits: commits.last, with_changed_files: false)
+ **push_data_params(commits: limited_commits.last, with_changed_files: false)
)
end
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index a49b981c680..7a22d7ffcdf 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -21,8 +21,9 @@ module Git
def commits
strong_memoize(:commits) do
if creating_default_branch?
- # The most recent PROCESS_COMMIT_LIMIT commits in the default branch
- project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT)
+ # The most recent PROCESS_COMMIT_LIMIT commits in the default branch.
+ # They are returned newest-to-oldest, but we need to present them oldest-to-newest
+ project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT).reverse
elsif creating_branch?
# Use the pushed commits that aren't reachable by the default branch
# as a heuristic. This may include more commits than are actually
@@ -95,7 +96,7 @@ module Git
end
def track_ci_config_change_event
- return unless Gitlab::CurrentSettings.usage_ping_enabled?
+ return unless ::ServicePing::ServicePingSettings.enabled?
return unless default_branch?
commits_changing_ci_config.each do |commit|
diff --git a/app/services/git/branch_push_service.rb b/app/services/git/branch_push_service.rb
index 5dcc2de456c..5bf39d98fa3 100644
--- a/app/services/git/branch_push_service.rb
+++ b/app/services/git/branch_push_service.rb
@@ -58,7 +58,7 @@ module Git
def stop_environments
return unless removing_branch?
- Ci::StopEnvironmentsService.new(project, current_user).execute(branch_name)
+ Environments::StopService.new(project, current_user).execute_for_branch(branch_name)
end
def unlock_artifacts
diff --git a/app/services/git/process_ref_changes_service.rb b/app/services/git/process_ref_changes_service.rb
index 6f348ff9e0b..da05f18b5ac 100644
--- a/app/services/git/process_ref_changes_service.rb
+++ b/app/services/git/process_ref_changes_service.rb
@@ -51,7 +51,7 @@ module Git
change: change,
push_options: params[:push_options],
merge_request_branches: merge_request_branches,
- create_pipelines: change[:index] < PIPELINE_PROCESS_LIMIT || Feature.enabled?(:git_push_create_all_pipelines, project),
+ create_pipelines: under_process_limit?(change),
execute_project_hooks: execute_project_hooks,
create_push_event: !create_bulk_push_event
).execute
@@ -60,6 +60,10 @@ module Git
create_bulk_push_event(ref_type, action, changes) if create_bulk_push_event
end
+ def under_process_limit?(change)
+ change[:index] < PIPELINE_PROCESS_LIMIT || Feature.enabled?(:git_push_create_all_pipelines, project)
+ end
+
def create_bulk_push_event(ref_type, action, changes)
EventCreateService.new.bulk_push(
project,
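
A runnable sketch of the predicate extracted above; the limit value and the keyword flag are illustrative stand-ins for PIPELINE_PROCESS_LIMIT and the git_push_create_all_pipelines feature flag check:

    PIPELINE_PROCESS_LIMIT = 4 # illustrative value

    def under_process_limit?(change, create_all_pipelines: false)
      change[:index] < PIPELINE_PROCESS_LIMIT || create_all_pipelines
    end

    puts under_process_limit?({ index: 2 })                              # => true
    puts under_process_limit?({ index: 9 })                              # => false
    puts under_process_limit?({ index: 9 }, create_all_pipelines: true)  # => true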
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index 966d04ceb70..b7eae06b963 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -208,12 +208,12 @@ module Groups
end
def update_integrations
- @group.integrations.inherit.delete_all
+ @group.integrations.with_default_settings.delete_all
Integration.create_from_active_default_integrations(@group, :group_id)
end
def propagate_integrations
- @group.integrations.inherit.each do |integration|
+ @group.integrations.with_default_settings.each do |integration|
PropagateIntegrationWorker.perform_async(integration.id)
end
end
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index 2f808d45ffd..2aaab88e778 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -111,7 +111,7 @@ module Import
private
def log_error(exception)
- Gitlab::Import::Logger.error(
+ Gitlab::GithubImport::Logger.error(
message: 'Import failed due to a GitHub error',
status: exception.response_status,
error: exception.response_body
diff --git a/app/services/issuable/clone/attributes_rewriter.rb b/app/services/issuable/clone/attributes_rewriter.rb
index e1b4613726d..d8b639bb422 100644
--- a/app/services/issuable/clone/attributes_rewriter.rb
+++ b/app/services/issuable/clone/attributes_rewriter.rb
@@ -99,7 +99,7 @@ module Issuable
yield(event)
end.compact
- Gitlab::Database.bulk_insert(table_name, events) # rubocop:disable Gitlab/BulkInsert
+ Gitlab::Database.main.bulk_insert(table_name, events) # rubocop:disable Gitlab/BulkInsert
end
end
diff --git a/app/services/issuable/clone/base_service.rb b/app/services/issuable/clone/base_service.rb
index 574fe85b466..1d2c5c06d1b 100644
--- a/app/services/issuable/clone/base_service.rb
+++ b/app/services/issuable/clone/base_service.rb
@@ -14,7 +14,7 @@ module Issuable
# Using transaction because of a high resources footprint
# on rewriting notes (unfolding references)
#
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
@new_entity = create_new_entity
update_new_entity
diff --git a/app/services/issuable/common_system_notes_service.rb b/app/services/issuable/common_system_notes_service.rb
index aedd0c377c6..38050708fc5 100644
--- a/app/services/issuable/common_system_notes_service.rb
+++ b/app/services/issuable/common_system_notes_service.rb
@@ -9,7 +9,7 @@ module Issuable
# We disable touch so that created system notes do not update
# the noteable's updated_at field
- ActiveRecord::Base.no_touching do
+ ApplicationRecord.no_touching do
if is_update
if issuable.previous_changes.include?('title')
create_title_change_note(issuable.previous_changes['title'].first)
diff --git a/app/services/issuable/destroy_label_links_service.rb b/app/services/issuable/destroy_label_links_service.rb
index 6fff9b5e8d2..49000d90c1f 100644
--- a/app/services/issuable/destroy_label_links_service.rb
+++ b/app/services/issuable/destroy_label_links_service.rb
@@ -22,7 +22,7 @@ module Issuable
SQL
loop do
- result = ActiveRecord::Base.connection.execute(delete_query)
+ result = LabelLink.connection.execute(delete_query)
break if result.cmd_tuples == 0
end
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 8d65865e7da..0984238517e 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -527,6 +527,12 @@ class IssuableBaseService < ::BaseProjectService
def allowed_update_params(params)
params
end
+
+ def update_issuable_sla(issuable)
+ return unless issuable_sla = issuable.issuable_sla
+
+ issuable_sla.update(issuable_closed: issuable.closed?)
+ end
end
IssuableBaseService.prepend_mod_with('IssuableBaseService')
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index cc4ad1a9c85..ea64239dd99 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -32,7 +32,7 @@ module Issues
notification_service.async.close_issue(issue, current_user, { closed_via: closed_via }) if notifications
todo_service.close_issue(issue, current_user)
- resolve_alert(issue)
+ perform_incident_management_actions(issue)
execute_hooks(issue, 'close')
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
@@ -51,6 +51,10 @@ module Issues
private
+ def perform_incident_management_actions(issue)
+ resolve_alert(issue)
+ end
+
def close_external_issue(issue, closed_via)
return unless project.external_issue_tracker&.support_close_issue?
@@ -89,3 +93,5 @@ module Issues
end
end
end
+
+Issues::CloseService.prepend_mod_with('Issues::CloseService')
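
Both the close and reopen services now end with a prepend_mod_with call, GitLab's hook for layering EE behaviour on top of the CE class. A plain-Ruby sketch of the underlying Module#prepend mechanism (class and module names below are illustrative):

    class CloseService
      def perform_incident_management_actions(issue)
        "resolved alert for #{issue}"
      end
    end

    # An optional module, prepended only when the extended edition is loaded.
    module EE_CloseService
      def perform_incident_management_actions(issue)
        super + ', then updated the SLA record'
      end
    end

    CloseService.prepend(EE_CloseService)

    puts CloseService.new.perform_incident_management_actions('issue-1')
    # => "resolved alert for issue-1, then updated the SLA record"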
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index 30d081996b1..b15b3e49c9a 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -9,11 +9,6 @@ module Issues
# in the caller (for example, an issue created via email) and the required arguments to the
# SpamParams constructor are not otherwise available, spam_params: must be explicitly passed as nil.
def initialize(project:, current_user: nil, params: {}, spam_params:)
- # Temporary check to ensure we are no longer passing request in params now that we have
- # introduced spam_params. Raise an exception if it is present.
- # Remove after https://gitlab.com/gitlab-org/gitlab/-/merge_requests/58603 is complete.
- raise if params[:request]
-
super(project: project, current_user: current_user, params: params)
@spam_params = spam_params
end
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index e2b1b5400c7..977b924ed72 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -9,6 +9,7 @@ module Issues
event_service.reopen_issue(issue, current_user)
create_note(issue, 'reopened')
notification_service.async.reopen_issue(issue, current_user)
+ perform_incident_management_actions(issue)
execute_hooks(issue, 'reopen')
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
@@ -21,8 +22,13 @@ module Issues
private
+ def perform_incident_management_actions(issue)
+ end
+
def create_note(issue, state = issue.state)
SystemNoteService.change_status(issue, issue.project, current_user, state, nil)
end
end
end
+
+Issues::ReopenService.prepend_mod_with('Issues::ReopenService')
diff --git a/app/services/jira/requests/base.rb b/app/services/jira/requests/base.rb
index e4e2736ca2f..56484075d08 100644
--- a/app/services/jira/requests/base.rb
+++ b/app/services/jira/requests/base.rb
@@ -6,6 +6,17 @@ module Jira
include ProjectServicesLoggable
JIRA_API_VERSION = 2
+ # Limit the size of the JSON error message we will attempt to parse, as the JSON is external input.
+ JIRA_ERROR_JSON_SIZE_LIMIT = 5_000
+
+ ERRORS = {
+ connection: [Errno::ECONNRESET, Errno::ECONNREFUSED],
+ jira_ruby: JIRA::HTTPError,
+ ssl: OpenSSL::SSL::SSLError,
+ timeout: [Timeout::Error, Errno::ETIMEDOUT],
+ uri: [URI::InvalidURIError, SocketError]
+ }.freeze
+ ALL_ERRORS = ERRORS.values.flatten.freeze
def initialize(jira_integration, params = {})
@project = jira_integration&.project
@@ -43,15 +54,66 @@ module Jira
def request
response = client.get(url)
build_service_response(response)
- rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED, URI::InvalidURIError, JIRA::HTTPError, OpenSSL::SSL::SSLError => error
- error_message = "Jira request error: #{error.message}"
- log_error("Error sending message", client_url: client.options[:site],
- error: {
- exception_class: error.class.name,
- exception_message: error.message,
- exception_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(error.backtrace)
- })
- ServiceResponse.error(message: error_message)
+ rescue *ALL_ERRORS => e
+ log_error('Error sending message',
+ client_url: client.options[:site],
+ error: {
+ exception_class: e.class.name,
+ exception_message: e.message,
+ exception_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(e.backtrace)
+ }
+ )
+
+ ServiceResponse.error(message: error_message(e))
+ end
+
+ def error_message(error)
+ reportable_error_message(error) ||
+ s_('JiraRequest|An error occurred while requesting data from Jira. Check your Jira integration configuration and try again.')
+ end
+
+ # Returns a user-facing error message if possible, otherwise `nil`.
+ def reportable_error_message(error)
+ case error
+ when ERRORS[:jira_ruby]
+ reportable_jira_ruby_error_message(error)
+ when ERRORS[:ssl]
+ s_('JiraRequest|An SSL error occurred while connecting to Jira: %{message}. Try your request again.') % { message: error.message }
+ when *ERRORS[:uri]
+ s_('JiraRequest|The Jira API URL for connecting to Jira is not valid. Check your Jira integration API URL and try again.')
+ when *ERRORS[:timeout]
+ s_('JiraRequest|A timeout error occurred while connecting to Jira. Try your request again.')
+ when *ERRORS[:connection]
+ s_('JiraRequest|A connection error occurred while connecting to Jira. Try your request again.')
+ end
+ end
+
+ # Returns a user-facing error message for a `JIRA::HTTPError` if possible,
+ # otherwise `nil`.
+ def reportable_jira_ruby_error_message(error)
+ case error.message
+ when 'Unauthorized'
+ s_('JiraRequest|The credentials for accessing Jira are not valid. Check your Jira integration credentials and try again.')
+ when 'Forbidden'
+ s_('JiraRequest|The credentials for accessing Jira are not allowed to access the data. Check your Jira integration credentials and try again.')
+ when 'Bad Request'
+ s_('JiraRequest|An error occurred while requesting data from Jira. Check your Jira integration configuration and try again.')
+ when /errorMessages/
+ jira_ruby_json_error_message(error.message)
+ end
+ end
+
+ def jira_ruby_json_error_message(error_message)
+ return if error_message.length > JIRA_ERROR_JSON_SIZE_LIMIT
+
+ begin
+ messages = Gitlab::Json.parse(error_message)['errorMessages']&.to_sentence
+ messages = Rails::Html::FullSanitizer.new.sanitize(messages).presence
+ return unless messages
+
+ s_('JiraRequest|An error occurred while requesting data from Jira: %{messages}. Check your Jira integration configuration and try again.') % { messages: messages }
+ rescue JSON::ParserError
+ end
end
def url
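
A standalone sketch of the rescue pattern introduced above: group the exception classes in a frozen hash, rescue the flattened list with a splat, and map the class back to a user-facing message (messages are shortened here):

    require 'socket'
    require 'timeout'
    require 'uri'

    ERRORS = {
      connection: [Errno::ECONNRESET, Errno::ECONNREFUSED],
      timeout: [Timeout::Error, Errno::ETIMEDOUT],
      uri: [URI::InvalidURIError, SocketError]
    }.freeze
    ALL_ERRORS = ERRORS.values.flatten.freeze

    def request
      raise Errno::ECONNREFUSED # stands in for client.get(url) failing
    rescue *ALL_ERRORS => e
      case e
      when *ERRORS[:connection] then 'A connection error occurred. Try again.'
      when *ERRORS[:timeout]    then 'A timeout error occurred. Try again.'
      when *ERRORS[:uri]        then 'The API URL is not valid.'
      end
    end

    puts request # => "A connection error occurred. Try again."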
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index 5d3c4a5c54a..3e809b11024 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -80,6 +80,11 @@ module Members
def after_execute(member:)
super
+ track_invite_source(member)
+ track_areas_of_focus(member)
+ end
+
+ def track_invite_source(member)
Gitlab::Tracking.event(self.class.name, 'create_member', label: invite_source, property: tracking_property(member), user: current_user)
end
@@ -94,6 +99,16 @@ module Members
member.invite? ? 'net_new_user' : 'existing_user'
end
+ def track_areas_of_focus(member)
+ areas_of_focus.each do |area_of_focus|
+ Gitlab::Tracking.event(self.class.name, 'area_of_focus', label: area_of_focus, property: member.id.to_s)
+ end
+ end
+
+ def areas_of_focus
+ params[:areas_of_focus] || []
+ end
+
def user_limit
limit = params.fetch(:limit, DEFAULT_INVITE_LIMIT)
diff --git a/app/services/members/import_project_team_service.rb b/app/services/members/import_project_team_service.rb
new file mode 100644
index 00000000000..5f4d5414cfa
--- /dev/null
+++ b/app/services/members/import_project_team_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Members
+ class ImportProjectTeamService < BaseService
+ attr_reader :params, :current_user
+
+ def target_project_id
+ @target_project_id ||= params[:id].presence
+ end
+
+ def source_project_id
+ @source_project_id ||= params[:project_id].presence
+ end
+
+ def target_project
+ @target_project ||= Project.find_by_id(target_project_id)
+ end
+
+ def source_project
+ @source_project ||= Project.find_by_id(source_project_id)
+ end
+
+ def execute
+ import_project_team
+ end
+
+ private
+
+ def import_project_team
+ return false unless target_project.present? && source_project.present? && current_user.present?
+ return false unless can?(current_user, :read_project_member, source_project)
+ return false unless can?(current_user, :admin_project_member, target_project)
+
+ target_project.team.import(source_project, current_user)
+ end
+ end
+end
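
A standalone sketch of the new service's guard-clause flow: each precondition short-circuits to false, and only then is the team imported (method and argument names are illustrative, not the service's API):

    def import_project_team(target:, source:, user:, can_read_source:, can_admin_target:)
      return false unless target && source && user
      return false unless can_read_source
      return false unless can_admin_target

      "#{user} imported the #{source} team into #{target}"
    end

    puts import_project_team(target: 'group/app', source: 'group/lib', user: 'alice',
                             can_read_source: true, can_admin_target: true)
    puts import_project_team(target: 'group/app', source: nil, user: 'alice',
                             can_read_source: true, can_admin_target: true) # => false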
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index 099ab1d26e9..0a652c58aab 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -61,8 +61,8 @@ module MergeRequests
end
def cleanup_environments(merge_request)
- Ci::StopEnvironmentsService.new(merge_request.source_project, current_user)
- .execute_for_merge_request(merge_request)
+ Environments::StopService.new(merge_request.source_project, current_user)
+ .execute_for_merge_request(merge_request)
end
def cancel_review_app_jobs!(merge_request)
diff --git a/app/services/merge_requests/conflicts/base_service.rb b/app/services/merge_requests/conflicts/base_service.rb
index 402f6c4e4c0..70add2fdcb1 100644
--- a/app/services/merge_requests/conflicts/base_service.rb
+++ b/app/services/merge_requests/conflicts/base_service.rb
@@ -3,10 +3,11 @@
module MergeRequests
module Conflicts
class BaseService
- attr_reader :merge_request
+ attr_reader :merge_request, :params
- def initialize(merge_request)
+ def initialize(merge_request, params = {})
@merge_request = merge_request
+ @params = params
end
end
end
diff --git a/app/services/merge_requests/conflicts/list_service.rb b/app/services/merge_requests/conflicts/list_service.rb
index 30a493e91ce..575a6bfe95a 100644
--- a/app/services/merge_requests/conflicts/list_service.rb
+++ b/app/services/merge_requests/conflicts/list_service.rb
@@ -23,7 +23,11 @@ module MergeRequests
end
def conflicts
- @conflicts ||= Gitlab::Conflict::FileCollection.new(merge_request)
+ @conflicts ||=
+ Gitlab::Conflict::FileCollection.new(
+ merge_request,
+ allow_tree_conflicts: params[:allow_tree_conflicts]
+ )
end
end
end
diff --git a/app/services/merge_requests/create_pipeline_service.rb b/app/services/merge_requests/create_pipeline_service.rb
index ebeba0ee5b8..6b032545230 100644
--- a/app/services/merge_requests/create_pipeline_service.rb
+++ b/app/services/merge_requests/create_pipeline_service.rb
@@ -3,7 +3,7 @@
module MergeRequests
class CreatePipelineService < MergeRequests::BaseService
def execute(merge_request)
- return unless can_create_pipeline_for?(merge_request)
+ return cannot_create_pipeline_error unless can_create_pipeline_for?(merge_request)
create_detached_merge_request_pipeline(merge_request)
end
@@ -60,6 +60,10 @@ module MergeRequests
::Gitlab::UserAccess.new(current_user, container: merge_request.target_project)
.can_update_branch?(merge_request.source_branch_ref)
end
+
+ def cannot_create_pipeline_error
+ ServiceResponse.error(message: 'Cannot create a pipeline for this merge request.', payload: nil)
+ end
end
end
diff --git a/app/services/merge_requests/merge_to_ref_service.rb b/app/services/merge_requests/merge_to_ref_service.rb
index e07e0c985b4..eda652c4b9a 100644
--- a/app/services/merge_requests/merge_to_ref_service.rb
+++ b/app/services/merge_requests/merge_to_ref_service.rb
@@ -66,6 +66,16 @@ module MergeRequests
end
def commit
+ if Feature.enabled?(:cache_merge_to_ref_calls, project, default_enabled: false)
+ Rails.cache.fetch(cache_key, expires_in: 1.day) do
+ extracted_merge_to_ref
+ end
+ else
+ extracted_merge_to_ref
+ end
+ end
+
+ def extracted_merge_to_ref
repository.merge_to_ref(current_user,
source_sha: source,
branch: merge_request.target_branch,
@@ -76,5 +86,9 @@ module MergeRequests
rescue Gitlab::Git::PreReceiveError, Gitlab::Git::CommandError => error
raise MergeError, error.message
end
+
+ def cache_key
+ [:merge_to_ref_service, project.full_path, merge_request.target_branch_sha, merge_request.source_branch_sha]
+ end
end
end
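
A standalone sketch of the caching shape added to #commit, with a plain Hash standing in for Rails.cache and the same key components as cache_key above; expiry is omitted for brevity:

    CACHE = {}

    def cached_merge_to_ref(cache_key)
      CACHE.fetch(cache_key) { CACHE[cache_key] = yield }
    end

    key = [:merge_to_ref_service, 'group/project', 'target-sha', 'source-sha']

    first  = cached_merge_to_ref(key) { "merge-commit-#{rand(10_000)}" } # computes
    second = cached_merge_to_ref(key) { "merge-commit-#{rand(10_000)}" } # cache hit

    puts first == second # => true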
diff --git a/app/services/merge_requests/push_options_handler_service.rb b/app/services/merge_requests/push_options_handler_service.rb
index 79b7eb8d9d8..adbe3ddfdad 100644
--- a/app/services/merge_requests/push_options_handler_service.rb
+++ b/app/services/merge_requests/push_options_handler_service.rb
@@ -65,7 +65,7 @@ module MergeRequests
end
if push_options[:target] && !target_project.repository.branch_exists?(push_options[:target])
- errors << "Branch #{push_options[:target]} does not exist"
+ errors << "Target branch #{target_project.full_path}:#{push_options[:target]} does not exist"
end
end
diff --git a/app/services/namespaces/in_product_marketing_emails_service.rb b/app/services/namespaces/in_product_marketing_emails_service.rb
index f7f0cf9abe8..0401653cf3c 100644
--- a/app/services/namespaces/in_product_marketing_emails_service.rb
+++ b/app/services/namespaces/in_product_marketing_emails_service.rb
@@ -8,8 +8,23 @@ module Namespaces
completed_actions: [:created],
incomplete_actions: [:git_write]
},
+ team_short: {
+ interval_days: [1],
+ completed_actions: [:git_write],
+ incomplete_actions: [:user_added]
+ },
+ trial_short: {
+ interval_days: [2],
+ completed_actions: [:git_write],
+ incomplete_actions: [:trial_started]
+ },
+ admin_verify: {
+ interval_days: [3],
+ completed_actions: [:git_write],
+ incomplete_actions: [:pipeline_created]
+ },
verify: {
- interval_days: [1, 5, 10],
+ interval_days: [4, 8, 13],
completed_actions: [:git_write],
incomplete_actions: [:pipeline_created]
},
@@ -98,14 +113,14 @@ module Namespaces
def can_perform_action?(user, group)
case track
- when :create
- user.can?(:create_projects, group)
- when :verify
+ when :create, :verify
user.can?(:create_projects, group)
- when :trial
+ when :trial, :trial_short
user.can?(:start_trial, group)
- when :team
+ when :team, :team_short
user.can?(:admin_group_member, group)
+ when :admin_verify
+ user.can?(:admin_group, group)
when :experience
true
end
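
A sketch of the consolidated permission dispatch above: tracks that share a permission now share a `when` clause, and the new admin_verify track gets its own (the returned symbols stand in for the user.can? checks):

    def required_permission(track)
      case track
      when :create, :verify     then :create_projects
      when :trial, :trial_short then :start_trial
      when :team, :team_short   then :admin_group_member
      when :admin_verify        then :admin_group
      when :experience          then :none
      end
    end

    puts required_permission(:trial_short)  # => start_trial
    puts required_permission(:admin_verify) # => admin_group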
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index 542fafb901b..194c3d7bf7b 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -4,7 +4,7 @@ module Notes
class CreateService < ::Notes::BaseService
include IncidentManagement::UsageData
- def execute
+ def execute(skip_capture_diff_note_position: false)
note = Notes::BuildService.new(project, current_user, params.except(:merge_request_diff_head_sha)).execute
# n+1: https://gitlab.com/gitlab-org/gitlab-foss/issues/37440
@@ -34,7 +34,7 @@ module Notes
end
end
- when_saved(note) if note_saved
+ when_saved(note, skip_capture_diff_note_position: skip_capture_diff_note_position) if note_saved
end
note
@@ -68,14 +68,14 @@ module Notes
end
end
- def when_saved(note)
+ def when_saved(note, skip_capture_diff_note_position: false)
todo_service.new_note(note, current_user)
clear_noteable_diffs_cache(note)
Suggestions::CreateService.new(note).execute
increment_usage_counter(note)
track_event(note, current_user)
- if note.for_merge_request? && note.diff_note? && note.start_of_discussion?
+ if !skip_capture_diff_note_position && note.for_merge_request? && note.diff_note? && note.start_of_discussion?
Discussions::CaptureDiffNotePositionService.new(note.noteable, note.diff_file&.paths).execute(note.discussion)
end
end
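
A standalone sketch of the keyword flag threaded through execute and when_saved above: omitting it preserves the old behaviour, and passing true skips the diff-note position capture (method bodies reduced to strings for illustration):

    def execute(skip_capture_diff_note_position: false)
      when_saved(skip_capture_diff_note_position: skip_capture_diff_note_position)
    end

    def when_saved(skip_capture_diff_note_position: false)
      return 'saved note, skipped position capture' if skip_capture_diff_note_position

      'saved note, captured diff note position'
    end

    puts execute                                         # old default behaviour
    puts execute(skip_capture_diff_note_position: true)  # new opt-out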
diff --git a/app/services/packages/composer/create_package_service.rb b/app/services/packages/composer/create_package_service.rb
index c84d40c3753..8215a3385a4 100644
--- a/app/services/packages/composer/create_package_service.rb
+++ b/app/services/packages/composer/create_package_service.rb
@@ -17,7 +17,9 @@ module Packages
})
end
- ::Packages::Composer::CacheUpdateWorker.perform_async(created_package.project_id, created_package.name, nil)
+ unless Feature.enabled?(:remove_composer_v1_cache_code, project)
+ ::Packages::Composer::CacheUpdateWorker.perform_async(created_package.project_id, created_package.name, nil)
+ end
created_package
end
diff --git a/app/services/packages/create_dependency_service.rb b/app/services/packages/create_dependency_service.rb
index 2999885d55d..2c80ec66dbc 100644
--- a/app/services/packages/create_dependency_service.rb
+++ b/app/services/packages/create_dependency_service.rb
@@ -27,7 +27,7 @@ module Packages
dependencies_to_insert = names_and_version_patterns.reject { |k, _| k.in?(existing_names) }
end
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
inserted_ids = bulk_insert_package_dependencies(dependencies_to_insert)
bulk_insert_package_dependency_links(type, (existing_ids + inserted_ids))
end
@@ -76,7 +76,7 @@ module Packages
end
def database
- ::Gitlab::Database
+ ::Gitlab::Database.main
end
end
end
diff --git a/app/services/packages/debian/generate_distribution_key_service.rb b/app/services/packages/debian/generate_distribution_key_service.rb
index 28c97c7681e..917965da58e 100644
--- a/app/services/packages/debian/generate_distribution_key_service.rb
+++ b/app/services/packages/debian/generate_distribution_key_service.rb
@@ -5,20 +5,42 @@ module Packages
class GenerateDistributionKeyService
include Gitlab::Utils::StrongMemoize
- def initialize(current_user:, params: {})
- @current_user = current_user
+ def initialize(params: {})
@params = params
end
def execute
- raise ArgumentError, 'Please provide a user' unless current_user.is_a?(User)
+ using_pinentry do |ctx|
+ # Generate key
+ ctx.generate_key generate_key_params
+
+ key = ctx.keys.first # rubocop:disable Gitlab/KeysFirstAndValuesFirst
+ fingerprint = key.fingerprint
+
+ # Export private key
+ data = GPGME::Data.new
+ ctx.export_keys fingerprint, data, GPGME::EXPORT_MODE_SECRET
+ data.seek 0
+ private_key = data.read
- generate_key
+ # Export public key
+ data = GPGME::Data.new
+ ctx.export_keys fingerprint, data
+ data.seek 0
+ public_key = data.read
+
+ {
+ private_key: private_key,
+ public_key: public_key,
+ passphrase: passphrase,
+ fingerprint: fingerprint
+ }
+ end
end
private
- attr_reader :current_user, :params
+ attr_reader :params
def passphrase
strong_memoize(:passphrase) do
@@ -72,35 +94,6 @@ module Packages
}.map { |k, v| "#{k}: #{v}\n" }.join +
'</GnupgKeyParms>'
end
-
- def generate_key
- using_pinentry do |ctx|
- # Generate key
- ctx.generate_key generate_key_params
-
- key = ctx.keys.first # rubocop:disable Gitlab/KeysFirstAndValuesFirst
- fingerprint = key.fingerprint
-
- # Export private key
- data = GPGME::Data.new
- ctx.export_keys fingerprint, data, GPGME::EXPORT_MODE_SECRET
- data.seek 0
- private_key = data.read
-
- # Export public key
- data = GPGME::Data.new
- ctx.export_keys fingerprint, data
- data.seek 0
- public_key = data.read
-
- {
- private_key: private_key,
- public_key: public_key,
- passphrase: passphrase,
- fingerprint: fingerprint
- }
- end
- end
end
end
end
diff --git a/app/services/packages/debian/generate_distribution_service.rb b/app/services/packages/debian/generate_distribution_service.rb
index 651325c49a0..74b07e05aa6 100644
--- a/app/services/packages/debian/generate_distribution_service.rb
+++ b/app/services/packages/debian/generate_distribution_service.rb
@@ -12,7 +12,7 @@ module Packages
DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
# From https://salsa.debian.org/ftp-team/dak/-/blob/991aaa27a7f7aa773bb9c0cf2d516e383d9cffa0/setup/core-init.d/080_metadatakeys#L9
- BINARIES_METADATA = %w(
+ METADATA_KEYS = %w(
Package
Source
Binary
@@ -89,15 +89,18 @@ module Packages
@distribution.components.ordered_by_name.each do |component|
@distribution.architectures.ordered_by_name.each do |architecture|
generate_component_file(component, :packages, architecture, :deb)
+ generate_component_file(component, :di_packages, architecture, :udeb)
end
+ generate_component_file(component, :source, nil, :dsc)
end
end
def generate_component_file(component, component_file_type, architecture, package_file_type)
paragraphs = @distribution.package_files
+ .preload_package
.preload_debian_file_metadata
.with_debian_component_name(component.name)
- .with_debian_architecture_name(architecture.name)
+ .with_debian_architecture_name(architecture&.name)
.with_debian_file_type(package_file_type)
.find_each
.map(&method(:package_stanza_from_fields))
@@ -106,21 +109,49 @@ module Packages
def package_stanza_from_fields(package_file)
[
- BINARIES_METADATA.map do |metadata_key|
- rfc822_field(metadata_key, package_file.debian_fields[metadata_key])
+ METADATA_KEYS.map do |metadata_key|
+ metadata_name = metadata_key
+ metadata_value = package_file.debian_fields[metadata_key]
+
+ if package_file.debian_dsc?
+ metadata_name = 'Package' if metadata_key == 'Source'
+ checksum = case metadata_key
+ when 'Files' then package_file.file_md5
+ when 'Checksums-Sha256' then package_file.file_sha256
+ when 'Checksums-Sha1' then package_file.file_sha1
+ end
+
+ if checksum
+ metadata_value = "\n#{checksum} #{package_file.size} #{package_file.file_name}#{metadata_value}"
+ end
+ end
+
+ rfc822_field(metadata_name, metadata_value)
end,
rfc822_field('Section', package_file.debian_fields['Section'] || 'misc'),
rfc822_field('Priority', package_file.debian_fields['Priority'] || 'extra'),
- rfc822_field('Filename', package_filename(package_file)),
- rfc822_field('Size', package_file.size),
- rfc822_field('MD5sum', package_file.file_md5),
- rfc822_field('SHA256', package_file.file_sha256)
+ package_file_extra_fields(package_file)
].flatten.compact.join('')
end
- def package_filename(package_file)
+ def package_file_extra_fields(package_file)
+ if package_file.debian_dsc?
+ [
+ rfc822_field('Directory', package_dirname(package_file))
+ ]
+ else
+ [
+ rfc822_field('Filename', "#{package_dirname(package_file)}/#{package_file.file_name}"),
+ rfc822_field('Size', package_file.size),
+ rfc822_field('MD5sum', package_file.file_md5),
+ rfc822_field('SHA256', package_file.file_sha256)
+ ]
+ end
+ end
+
+ def package_dirname(package_file)
letter = package_file.package.name.start_with?('lib') ? package_file.package.name[0..3] : package_file.package.name[0]
- "#{pool_prefix(package_file)}/#{letter}/#{package_file.package.name}/#{package_file.file_name}"
+ "#{pool_prefix(package_file)}/#{letter}/#{package_file.package.name}/#{package_file.package.version}"
end
def pool_prefix(package_file)
@@ -128,7 +159,7 @@ module Packages
when ::Packages::Debian::GroupDistribution
"pool/#{@distribution.codename}/#{package_file.package.project_id}"
else
- "pool/#{@distribution.codename}/#{@distribution.container_id}"
+ "pool/#{@distribution.codename}"
end
end
@@ -161,28 +192,37 @@ module Packages
end
def generate_release
- @distribution.file = CarrierWaveStringFile.new(release_header + release_sums)
+ @distribution.key || @distribution.create_key(GenerateDistributionKeyService.new.execute)
+ @distribution.file = CarrierWaveStringFile.new(release_content)
+ @distribution.file_signature = SignDistributionService.new(@distribution, release_content, detach: true).execute
+ @distribution.signed_file = CarrierWaveStringFile.new(
+ SignDistributionService.new(@distribution, release_content).execute
+ )
@distribution.updated_at = release_date
@distribution.save!
end
- def release_header
- strong_memoize(:release_header) do
- [
- %w[origin label suite version codename].map do |attribute|
- rfc822_field(attribute.capitalize, @distribution.attributes[attribute])
- end,
- rfc822_field('Date', release_date.to_formatted_s(:rfc822)),
- valid_until_field,
- rfc822_field('NotAutomatic', !@distribution.automatic, !@distribution.automatic),
- rfc822_field('ButAutomaticUpgrades', @distribution.automatic_upgrades, !@distribution.automatic && @distribution.automatic_upgrades),
- rfc822_field('Architectures', @distribution.architectures.map { |architecture| architecture.name }.sort.join(' ')),
- rfc822_field('Components', @distribution.components.map { |component| component.name }.sort.join(' ')),
- rfc822_field('Description', @distribution.description)
- ].flatten.compact.join('')
+ def release_content
+ strong_memoize(:release_content) do
+ release_header + release_sums
end
end
+ def release_header
+ [
+ %w[origin label suite version codename].map do |attribute|
+ rfc822_field(attribute.capitalize, @distribution.attributes[attribute])
+ end,
+ rfc822_field('Date', release_date.to_formatted_s(:rfc822)),
+ valid_until_field,
+ rfc822_field('NotAutomatic', !@distribution.automatic, !@distribution.automatic),
+ rfc822_field('ButAutomaticUpgrades', @distribution.automatic_upgrades, !@distribution.automatic && @distribution.automatic_upgrades),
+ rfc822_field('Architectures', @distribution.architectures.map { |architecture| architecture.name }.sort.join(' ')),
+ rfc822_field('Components', @distribution.components.map { |component| component.name }.sort.join(' ')),
+ rfc822_field('Description', @distribution.description)
+ ].flatten.compact.join('')
+ end
+
def release_date
strong_memoize(:release_date) do
Time.now.utc
@@ -197,7 +237,8 @@ module Packages
return unless condition
return if value.blank?
- "#{name}: #{value.to_s.gsub("\n\n", "\n.\n").gsub("\n", "\n ")}\n"
+ value = " #{value}" unless value[0] == "\n"
+ "#{name}:#{value.to_s.gsub("\n\n", "\n.\n").gsub("\n", "\n ")}\n"
end
def valid_until_field
diff --git a/app/services/packages/debian/sign_distribution_service.rb b/app/services/packages/debian/sign_distribution_service.rb
new file mode 100644
index 00000000000..7797f7e9c0a
--- /dev/null
+++ b/app/services/packages/debian/sign_distribution_service.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Packages
+ module Debian
+ class SignDistributionService
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(distribution, content, detach: false)
+ @distribution = distribution
+ @content = content
+ @detach = detach
+ end
+
+ def execute
+ raise ArgumentError, 'distribution key is missing' unless @distribution.key
+
+ sig_mode = GPGME::GPGME_SIG_MODE_CLEAR
+
+ sig_mode = GPGME::GPGME_SIG_MODE_DETACH if @detach
+
+ Gitlab::Gpg.using_tmp_keychain do
+ GPGME::Ctx.new(
+ armor: true,
+ offline: true,
+ pinentry_mode: GPGME::PINENTRY_MODE_LOOPBACK,
+ password: @distribution.key.passphrase
+ ) do |ctx|
+ ctx.import(GPGME::Data.from_str(@distribution.key.public_key))
+ ctx.import(GPGME::Data.from_str(@distribution.key.private_key))
+ signature = GPGME::Data.new
+ ctx.sign(GPGME::Data.from_str(@content), signature, sig_mode)
+ signature.to_s
+ end
+ end
+ end
+ end
+ end
+end
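
The new signing service is called twice by generate_release above: once with detach: true (a detached signature, presumably the Release.gpg-style artifact) and once clearsigned (the InRelease-style artifact). A standalone sketch of that wiring with a stubbed signer in place of the GPGME-backed service:

    # Stub signer; the real service signs with the distribution's GPG key.
    def sign(content, detach: false)
      detach ? "detached signature over #{content.bytesize} bytes" : "clearsigned:\n#{content}"
    end

    release_content = "Origin: example\nCodename: stable\n"

    file_signature = sign(release_content, detach: true) # stored alongside the file
    signed_file    = sign(release_content)               # clearsigned copy of the file

    puts file_signature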
diff --git a/app/services/packages/go/create_package_service.rb b/app/services/packages/go/create_package_service.rb
index 4e8b8ef8d6b..2a6eeff402e 100644
--- a/app/services/packages/go/create_package_service.rb
+++ b/app/services/packages/go/create_package_service.rb
@@ -23,7 +23,7 @@ module Packages
files[:mod] = prepare_file(version, :mod, version.gomod)
files[:zip] = prepare_file(version, :zip, version.archive.string)
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
# create new package and files
package = create_package
files.each { |type, (file, digests)| create_file(package, type, file, digests) }
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
index 22396eb7687..1d5d9c38432 100644
--- a/app/services/packages/npm/create_package_service.rb
+++ b/app/services/packages/npm/create_package_service.rb
@@ -9,7 +9,7 @@ module Packages
return error('Package already exists.', 403) if current_package_exists?
return error('File is too large.', 400) if file_size_exceeded?
- ActiveRecord::Base.transaction { create_npm_package! }
+ ApplicationRecord.transaction { create_npm_package! }
end
private
diff --git a/app/services/packages/nuget/create_dependency_service.rb b/app/services/packages/nuget/create_dependency_service.rb
index 62ab485c0fc..3fc42056d43 100644
--- a/app/services/packages/nuget/create_dependency_service.rb
+++ b/app/services/packages/nuget/create_dependency_service.rb
@@ -41,7 +41,7 @@ module Packages
}
end
- ::Gitlab::Database.bulk_insert(::Packages::Nuget::DependencyLinkMetadatum.table_name, rows.compact) # rubocop:disable Gitlab/BulkInsert
+ ::Gitlab::Database.main.bulk_insert(::Packages::Nuget::DependencyLinkMetadatum.table_name, rows.compact) # rubocop:disable Gitlab/BulkInsert
end
def raw_dependency_for(dependency)
diff --git a/app/services/packages/nuget/update_package_from_metadata_service.rb b/app/services/packages/nuget/update_package_from_metadata_service.rb
index 2d1733421fd..6ffe4f097f4 100644
--- a/app/services/packages/nuget/update_package_from_metadata_service.rb
+++ b/app/services/packages/nuget/update_package_from_metadata_service.rb
@@ -21,22 +21,11 @@ module Packages
try_obtain_lease do
@package_file.transaction do
- if existing_package
- package = link_to_existing_package
- elsif symbol_package?
- raise InvalidMetadataError, 'symbol package is invalid, matching package does not exist'
+ if use_new_package_file_updater?
+ new_execute
else
- package = update_linked_package
+ legacy_execute
end
-
- update_package(package)
-
- # Updating file_name updates the path where the file is stored.
- # We must pass the file again so that CarrierWave can handle the update
- @package_file.update!(
- file_name: package_filename,
- file: @package_file.file
- )
end
end
rescue ActiveRecord::RecordInvalid => e
@@ -45,6 +34,52 @@ module Packages
private
+ def new_execute
+ package_to_destroy = nil
+ target_package = @package_file.package
+
+ if existing_package
+ package_to_destroy = @package_file.package
+ target_package = existing_package
+ else
+ if symbol_package?
+ raise InvalidMetadataError, 'symbol package is invalid, matching package does not exist'
+ end
+
+ update_linked_package
+ end
+
+ update_package(target_package)
+
+ ::Packages::UpdatePackageFileService.new(@package_file, package_id: target_package.id, file_name: package_filename)
+ .execute
+
+ package_to_destroy&.destroy!
+ end
+
+ def legacy_execute
+ if existing_package
+ package = link_to_existing_package
+ elsif symbol_package?
+ raise InvalidMetadataError, 'symbol package is invalid, matching package does not exist'
+ else
+ package = update_linked_package
+ end
+
+ update_package(package)
+
+ # Updating file_name updates the path where the file is stored.
+ # We must pass the file again so that CarrierWave can handle the update
+ @package_file.update!(
+ file_name: package_filename,
+ file: @package_file.file
+ )
+ end
+
+ def use_new_package_file_updater?
+ ::Feature.enabled?(:packages_nuget_new_package_file_updater, @package_file.project, default_enabled: :yaml)
+ end
+
def update_package(package)
return if symbol_package?
diff --git a/app/services/packages/terraform_module/create_package_service.rb b/app/services/packages/terraform_module/create_package_service.rb
index fc376c70b00..03f749edfa8 100644
--- a/app/services/packages/terraform_module/create_package_service.rb
+++ b/app/services/packages/terraform_module/create_package_service.rb
@@ -11,7 +11,7 @@ module Packages
return error('Package version already exists.', 403) if current_package_version_exists?
return error('File is too large.', 400) if file_size_exceeded?
- ActiveRecord::Base.transaction { create_terraform_module_package! }
+ ApplicationRecord.transaction { create_terraform_module_package! }
end
private
diff --git a/app/services/packages/update_package_file_service.rb b/app/services/packages/update_package_file_service.rb
new file mode 100644
index 00000000000..9a8a78e509a
--- /dev/null
+++ b/app/services/packages/update_package_file_service.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Packages
+ class UpdatePackageFileService
+ delegate :file, to: :@package_file
+
+ def initialize(package_file, params)
+ @package_file = package_file
+ @params = params
+ end
+
+ def execute
+ check_params
+
+ return if same_as_params?
+
+ # we need to access the file *before* updating the attributes linked to its path/key.
+ file_storage_mode = file.file_storage?
+
+ @package_file.package_id = package_id if package_id
+ @package_file.file_name = file_name if file_name
+
+ if file_storage_mode
+ # package file is in mode LOCAL: we can pass the `file` to the update
+ @package_file.file = file
+ else
+ # package file is in mode REMOTE: don't pass the `file` to the update
+ # instead, pass the new file path. This will move the file
+ # in object storage.
+ @package_file.new_file_path = File.join(file.store_dir, @package_file.file_name)
+ end
+
+ @package_file.save!
+ end
+
+ private
+
+ def check_params
+ raise ArgumentError, 'package_file not persisted' unless @package_file.persisted?
+ raise ArgumentError, 'package_id and file_name are blank' if package_id.blank? && file_name.blank?
+ end
+
+ def same_as_params?
+ return false if package_id && package_id != @package_file.package_id
+ return false if file_name && file_name != @package_file.file_name
+
+ true
+ end
+
+ def package_id
+ @params[:package_id]
+ end
+
+ def file_name
+ @params[:file_name]
+ end
+ end
+end
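
A standalone sketch of the same_as_params? guard in the new service: when every provided param already matches the record, the update is skipped (a Struct stands in for the package file model):

    PackageFile = Struct.new(:package_id, :file_name, keyword_init: true)

    def same_as_params?(package_file, params)
      return false if params[:package_id] && params[:package_id] != package_file.package_id
      return false if params[:file_name] && params[:file_name] != package_file.file_name

      true
    end

    package_file = PackageFile.new(package_id: 42, file_name: 'pkg.1.0.0.nupkg')

    puts same_as_params?(package_file, package_id: 42)              # => true, nothing to do
    puts same_as_params?(package_file, file_name: 'renamed.nupkg')  # => false, update needed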
diff --git a/app/services/packages/update_tags_service.rb b/app/services/packages/update_tags_service.rb
index adfad52910e..2bdf75a6617 100644
--- a/app/services/packages/update_tags_service.rb
+++ b/app/services/packages/update_tags_service.rb
@@ -15,7 +15,7 @@ module Packages
tags_to_create = @tags - existing_tags
@package.tags.with_name(tags_to_destroy).delete_all if tags_to_destroy.any?
- ::Gitlab::Database.bulk_insert(Packages::Tag.table_name, rows(tags_to_create)) if tags_to_create.any? # rubocop:disable Gitlab/BulkInsert
+ ::Gitlab::Database.main.bulk_insert(Packages::Tag.table_name, rows(tags_to_create)) if tags_to_create.any? # rubocop:disable Gitlab/BulkInsert
end
private
diff --git a/app/services/post_receive_service.rb b/app/services/post_receive_service.rb
index a6d49f03c0b..f5638b0aa40 100644
--- a/app/services/post_receive_service.rb
+++ b/app/services/post_receive_service.rb
@@ -17,13 +17,18 @@ class PostReceiveService
response = Gitlab::InternalPostReceive::Response.new
push_options = Gitlab::PushOptions.new(params[:push_options])
+ mr_options = push_options.get(:merge_request)
response.reference_counter_decreased = Gitlab::ReferenceCounter.new(params[:gl_repository]).decrease
+ # The PostReceive worker will normally invalidate the cache. However, it
+ # runs asynchronously. If push options require us to create a new merge
+ # request synchronously, we can't rely on that, so invalidate the cache here
+ repository&.expire_branches_cache if mr_options&.fetch(:create, false)
+
PostReceive.perform_async(params[:gl_repository], params[:identifier],
params[:changes], push_options.as_json)
- mr_options = push_options.get(:merge_request)
if mr_options.present?
message = process_mr_push_options(mr_options, params[:changes])
response.add_alert_message(message)
diff --git a/app/services/projects/after_rename_service.rb b/app/services/projects/after_rename_service.rb
index 6d389035922..953b386b754 100644
--- a/app/services/projects/after_rename_service.rb
+++ b/app/services/projects/after_rename_service.rb
@@ -83,7 +83,7 @@ module Projects
def update_repository_configuration
project.reload_repository!
- project.write_repository_config
+ project.set_full_path
project.track_project_repository
end
diff --git a/app/services/projects/cleanup_service.rb b/app/services/projects/cleanup_service.rb
index 5eafa5f9b29..75be3425029 100644
--- a/app/services/projects/cleanup_service.rb
+++ b/app/services/projects/cleanup_service.rb
@@ -65,7 +65,7 @@ module Projects
def cleanup_diffs(response)
old_commit_shas = extract_old_commit_shas(response.entries)
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
cleanup_merge_request_diffs(old_commit_shas)
cleanup_note_diff_files(old_commit_shas)
end
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 9a5c260e488..302c047a65f 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -65,7 +65,7 @@ module Projects
save_project_and_import_data
- Gitlab::ApplicationContext.with_context(related_class: "Projects::CreateService", project: @project) do
+ Gitlab::ApplicationContext.with_context(project: @project) do
after_create_actions if @project.persisted?
import_schedule
@@ -92,7 +92,7 @@ module Projects
# Skip writing the config for project imports/forks because it
# will always fail since the Git directory doesn't exist until
# a background job creates it (see Project#add_import_job).
- @project.write_repository_config unless @project.import?
+ @project.set_full_path unless @project.import?
unless @project.gitlab_project_import?
@project.create_wiki unless skip_wiki?
@@ -101,6 +101,8 @@ module Projects
@project.track_project_repository
@project.create_project_setting unless @project.project_setting
+ yield if block_given?
+
event_service.create_project(@project, current_user)
system_hook_service.execute_hooks_for(@project, :create)
@@ -162,7 +164,7 @@ module Projects
@project.create_or_update_import_data(data: @import_data[:data], credentials: @import_data[:credentials]) if @import_data
if @project.save
- Integration.create_from_active_default_integrations(@project, :project_id, with_templates: true)
+ Integration.create_from_active_default_integrations(@project, :project_id)
@project.create_labels unless @project.gitlab_project_import?
diff --git a/app/services/projects/detect_repository_languages_service.rb b/app/services/projects/detect_repository_languages_service.rb
index c57773c3302..0356a6b0ccd 100644
--- a/app/services/projects/detect_repository_languages_service.rb
+++ b/app/services/projects/detect_repository_languages_service.rb
@@ -21,7 +21,7 @@ module Projects
.update_all(share: update[:share])
end
- Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ Gitlab::Database.main.bulk_insert( # rubocop:disable Gitlab/BulkInsert
RepositoryLanguage.table_name,
detection.insertions(matching_programming_languages)
)
diff --git a/app/services/projects/fetch_statistics_increment_service.rb b/app/services/projects/fetch_statistics_increment_service.rb
index b150fd2d9f1..3354a074d1e 100644
--- a/app/services/projects/fetch_statistics_increment_service.rb
+++ b/app/services/projects/fetch_statistics_increment_service.rb
@@ -15,7 +15,7 @@ module Projects
ON CONFLICT (project_id, date) DO UPDATE SET fetch_count = #{table_name}.fetch_count + 1
SQL
- ActiveRecord::Base.connection.execute(increment_fetch_count_sql)
+ ProjectDailyStatistic.connection.execute(increment_fetch_count_sql)
end
private
diff --git a/app/services/projects/hashed_storage/migrate_repository_service.rb b/app/services/projects/hashed_storage/migrate_repository_service.rb
index c7989e04607..b65d0e63fd3 100644
--- a/app/services/projects/hashed_storage/migrate_repository_service.rb
+++ b/app/services/projects/hashed_storage/migrate_repository_service.rb
@@ -14,7 +14,7 @@ module Projects
result = move_repositories
if result
- project.write_repository_config
+ project.set_full_path
project.track_project_repository
else
rollback_folder_move
diff --git a/app/services/projects/hashed_storage/rollback_repository_service.rb b/app/services/projects/hashed_storage/rollback_repository_service.rb
index 6ab49630603..f4146ff9158 100644
--- a/app/services/projects/hashed_storage/rollback_repository_service.rb
+++ b/app/services/projects/hashed_storage/rollback_repository_service.rb
@@ -14,7 +14,7 @@ module Projects
result = move_repositories
if result
- project.write_repository_config
+ project.set_full_path
project.track_project_repository
else
rollback_folder_move
diff --git a/app/services/projects/lfs_pointers/lfs_link_service.rb b/app/services/projects/lfs_pointers/lfs_link_service.rb
index e86106f0a09..7c00b9e6105 100644
--- a/app/services/projects/lfs_pointers/lfs_link_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_link_service.rb
@@ -38,7 +38,7 @@ module Projects
rows = existent_lfs_objects
.not_linked_to_project(project)
.map { |existing_lfs_object| { project_id: project.id, lfs_object_id: existing_lfs_object.id } }
- Gitlab::Database.bulk_insert(:lfs_objects_projects, rows) # rubocop:disable Gitlab/BulkInsert
+ Gitlab::Database.main.bulk_insert(:lfs_objects_projects, rows) # rubocop:disable Gitlab/BulkInsert
iterations += 1
linked_existing_objects += existent_lfs_objects.map(&:oid)
diff --git a/app/services/projects/lfs_pointers/lfs_object_download_list_service.rb b/app/services/projects/lfs_pointers/lfs_object_download_list_service.rb
index 75106297043..b4872cd9442 100644
--- a/app/services/projects/lfs_pointers/lfs_object_download_list_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_object_download_list_service.rb
@@ -31,7 +31,7 @@ module Projects
#
LfsDownloadLinkListService
.new(project, remote_uri: current_endpoint_uri)
- .execute(lfs_pointers_in_repository)
+ .execute(missing_lfs_files)
rescue LfsDownloadLinkListService::DownloadLinksError => e
raise LfsObjectDownloadListError, "The LFS objects download list couldn't be imported. Error: #{e.message}"
end
@@ -53,6 +53,22 @@ module Projects
@lfs_pointers_in_repository ||= LfsListService.new(project).execute
end
+ def existing_lfs_objects
+ project.lfs_objects
+ end
+
+ def existing_lfs_objects_hash
+ {}.tap do |hash|
+ existing_lfs_objects.find_each do |lfs_object|
+ hash[lfs_object.oid] = lfs_object.size
+ end
+ end
+ end
+
+ def missing_lfs_files
+ lfs_pointers_in_repository.except(*existing_lfs_objects_hash.keys)
+ end
+
def lfsconfig_endpoint_uri
strong_memoize(:lfsconfig_endpoint_uri) do
# Retrieving the blob data from the .lfsconfig file
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index 2cc6bcdf57c..51b8e3c6c54 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -63,8 +63,15 @@ module Projects
# Make sure we're converting to symbols because
# * ActionController::Parameters#keys returns a list of strings
# * in specs we're using hashes with symbols as keys
+ update_keys = settings.keys.map(&:to_sym)
- settings.keys.map(&:to_sym) == %i[enabled]
+ # Integrated error tracking works without Sentry integration,
+ # so we don't need to update all those values from error_tracking_params_for_update method.
+ # Instead we turn it on/off with partial update together with "enabled" attribute.
+      # But since it's optional, we exclude it from the condition below.
+ update_keys.delete(:integrated)
+
+ update_keys == %i[enabled]
end
def error_tracking_params_for_partial_update(settings)
diff --git a/app/services/projects/overwrite_project_service.rb b/app/services/projects/overwrite_project_service.rb
index 6be3b1b5a6f..f35370c427f 100644
--- a/app/services/projects/overwrite_project_service.rb
+++ b/app/services/projects/overwrite_project_service.rb
@@ -5,6 +5,8 @@ module Projects
def execute(source_project)
return unless source_project && source_project.namespace == @project.namespace
+ start_time = ::Gitlab::Metrics::System.monotonic_time
+
Project.transaction do
move_before_destroy_relationships(source_project)
# Reset is required in order to get the proper
@@ -25,10 +27,25 @@ module Projects
else
raise
end
+
+ ensure
+ track_service(start_time, source_project, e)
end
private
+ def track_service(start_time, source_project, exception)
+ return if ::Feature.disabled?(:project_overwrite_service_tracking, source_project, default_enabled: :yaml)
+
+ duration = ::Gitlab::Metrics::System.monotonic_time - start_time
+
+ Gitlab::AppJsonLogger.info(class: self.class.name,
+ namespace_id: source_project.namespace.id,
+ project_id: source_project.id,
+ duration_s: duration.to_f,
+ error: exception.class.name)
+ end
+
def move_before_destroy_relationships(source_project)
options = { remove_remaining_elements: false }
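
A standalone sketch of the duration tracking wrapped around the overwrite above, using Ruby's monotonic clock directly (the service goes through Gitlab::Metrics::System.monotonic_time, which is equivalent in spirit); the ensure block reports even when the work raises:

    start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    error_class = nil

    begin
      sleep 0.01 # stands in for the project overwrite work
    rescue StandardError => e
      error_class = e.class.name
      raise
    ensure
      duration_s = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time
      puts format('overwrite finished in %.3fs (error: %s)', duration_s, error_class.inspect)
    end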
diff --git a/app/services/projects/protect_default_branch_service.rb b/app/services/projects/protect_default_branch_service.rb
index 0111b9e377a..03d1c49657d 100644
--- a/app/services/projects/protect_default_branch_service.rb
+++ b/app/services/projects/protect_default_branch_service.rb
@@ -69,3 +69,5 @@ module Projects
end
end
end
+
+Projects::ProtectDefaultBranchService.prepend_mod
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index fb0fea756bc..074550e104d 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -89,6 +89,8 @@ module Projects
update_integrations
+ remove_paid_features
+
project.old_path_with_namespace = @old_path
update_repository_configuration(@new_path)
@@ -109,6 +111,10 @@ module Projects
move_pages(project)
end
+ # Overridden in EE
+ def remove_paid_features
+ end
+
def transfer_missing_group_resources(group)
Labels::TransferService.new(current_user, group, project).execute
@@ -129,7 +135,7 @@ module Projects
end
def update_repository_configuration(full_path)
- project.write_repository_config(gl_full_path: full_path)
+ project.set_full_path(gl_full_path: full_path)
project.track_project_repository
end
@@ -235,7 +241,7 @@ module Projects
end
def update_integrations
- project.integrations.inherit.delete_all
+ project.integrations.with_default_settings.delete_all
Integration.create_from_active_default_integrations(project, :project_id)
end
end
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index a90c22c7de5..f96a6ee1255 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -37,14 +37,13 @@ module Projects
job.run!
end
- raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
- raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
+ validate_state!
+ validate_max_size!
+ validate_max_entries!
build.artifacts_file.use_file do |artifacts_path|
deploy_to_legacy_storage(artifacts_path)
-
create_pages_deployment(artifacts_path, build)
-
success
end
rescue InvalidStateError => e
@@ -92,8 +91,10 @@ module Projects
# Check if we did extract public directory
archive_public_path = File.join(tmp_path, PUBLIC_DIR)
+
raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
- raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
+
+ validate_outdated_sha!
deploy_page!(archive_public_path)
end
@@ -108,15 +109,6 @@ module Projects
end
def extract_zip_archive!(artifacts_path, temp_path)
- raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
-
- # Calculate page size after extract
- public_entry = build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true)
-
- if public_entry.total_size > max_size
- raise InvalidStateError, "artifacts for pages are too large: #{public_entry.total_size}"
- end
-
SafeZip::Extract.new(artifacts_path)
.extract(directories: [PUBLIC_DIR], to: temp_path)
rescue SafeZip::Extract::Error => e
@@ -151,19 +143,17 @@ module Projects
end
def create_pages_deployment(artifacts_path, build)
- # we're using the full archive and pages daemon needs to read it
- # so we want the total count from entries, not only "public/" directory
- # because it better approximates work we need to do before we can serve the site
- entries_count = build.artifacts_metadata_entry("", recursive: true).entries.count
sha256 = build.job_artifacts_archive.file_sha256
deployment = nil
File.open(artifacts_path) do |file|
deployment = project.pages_deployments.create!(file: file,
file_count: entries_count,
- file_sha256: sha256)
+ file_sha256: sha256,
+ ci_build_id: build.id
+ )
- raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
+ validate_outdated_sha!
project.update_pages_deployment!(deployment)
end
@@ -175,29 +165,6 @@ module Projects
)
end
- def latest?
- # check if sha for the ref is still the most recent one
- # this helps in case when multiple deployments happens
- sha == latest_sha
- end
-
- def blocks
- # Calculate dd parameters: we limit the size of pages
- 1 + max_size / BLOCK_SIZE
- end
-
- def max_size_from_settings
- Gitlab::CurrentSettings.max_pages_size.megabytes
- end
-
- def max_size
- max_pages_size = max_size_from_settings
-
- return ::Gitlab::Pages::MAX_SIZE if max_pages_size == 0
-
- max_pages_size
- end
-
def tmp_path
@tmp_path ||= File.join(::Settings.pages.path, TMP_EXTRACT_PATH)
end
@@ -262,6 +229,73 @@ module Projects
def tmp_dir_prefix
"project-#{project.id}-build-#{build.id}-"
end
+
+ def validate_state!
+ raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
+ raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
+
+ validate_outdated_sha!
+ end
+
+ def validate_outdated_sha!
+ return if latest?
+
+ if Feature.enabled?(:pages_smart_check_outdated_sha, project, default_enabled: :yaml)
+ # use pipeline_id in case the build is retried
+ last_deployed_pipeline_id = project.pages_metadatum&.pages_deployment&.ci_build&.pipeline_id
+
+ return unless last_deployed_pipeline_id
+ return if last_deployed_pipeline_id <= build.pipeline_id
+ end
+
+ raise InvalidStateError, 'build SHA is outdated for this ref'
+ end
+
+ def latest?
+ # check if sha for the ref is still the most recent one
+ # this helps in case when multiple deployments happens
+ sha == latest_sha
+ end
+
+ def validate_max_size!
+ if total_size > max_size
+ raise InvalidStateError, "artifacts for pages are too large: #{total_size}"
+ end
+ end
+
+ # Calculate page size after extract
+ def total_size
+ @total_size ||= build.artifacts_metadata_entry(PUBLIC_DIR + '/', recursive: true).total_size
+ end
+
+ def max_size_from_settings
+ Gitlab::CurrentSettings.max_pages_size.megabytes
+ end
+
+ def max_size
+ max_pages_size = max_size_from_settings
+
+ return ::Gitlab::Pages::MAX_SIZE if max_pages_size == 0
+
+ max_pages_size
+ end
+
+ def validate_max_entries!
+ if pages_file_entries_limit > 0 && entries_count > pages_file_entries_limit
+ raise InvalidStateError, "pages site contains #{entries_count} file entries, while limit is set to #{pages_file_entries_limit}"
+ end
+ end
+
+ def entries_count
+ # we're using the full archive and pages daemon needs to read it
+ # so we want the total count from entries, not only "public/" directory
+ # because it better approximates work we need to do before we can serve the site
+ @entries_count = build.artifacts_metadata_entry("", recursive: true).entries.count
+ end
+
+ def pages_file_entries_limit
+ project.actual_limits.pages_file_entries
+ end
end
end
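
A standalone sketch of the new entries-limit validation: a configured limit of zero disables the check, and exceeding a positive limit raises with the same style of message:

    def validate_max_entries!(entries_count, pages_file_entries_limit)
      return unless pages_file_entries_limit > 0 && entries_count > pages_file_entries_limit

      raise "pages site contains #{entries_count} file entries, " \
            "while limit is set to #{pages_file_entries_limit}"
    end

    validate_max_entries!(500, 0)     # limit disabled, passes
    validate_max_entries!(500, 1000)  # under the limit, passes

    begin
      validate_max_entries!(1500, 1000)
    rescue RuntimeError => e
      puts e.message
    end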
diff --git a/app/services/projects/update_remote_mirror_service.rb b/app/services/projects/update_remote_mirror_service.rb
index 6c29ba81910..898125c181c 100644
--- a/app/services/projects/update_remote_mirror_service.rb
+++ b/app/services/projects/update_remote_mirror_service.rb
@@ -43,12 +43,7 @@ module Projects
# LFS objects must be sent first, or the push has dangling pointers
send_lfs_objects!(remote_mirror)
- response = if Feature.enabled?(:update_remote_mirror_inmemory, project, default_enabled: :yaml)
- remote_mirror.update_repository(inmemory_remote: true)
- else
- remote_mirror.ensure_remote!
- remote_mirror.update_repository(inmemory_remote: false)
- end
+ response = remote_mirror.update_repository
if response.divergent_refs.any?
message = "Some refs have diverged and have not been updated on the remote:"
diff --git a/app/services/releases/base_service.rb b/app/services/releases/base_service.rb
index b4b493624e7..249333e6d13 100644
--- a/app/services/releases/base_service.rb
+++ b/app/services/releases/base_service.rb
@@ -83,15 +83,6 @@ module Releases
release.execute_hooks(action)
end
- def track_protected_tag_access_error!
- unless ::Gitlab::UserAccess.new(current_user, container: project).can_create_tag?(tag_name)
- Gitlab::ErrorTracking.log_exception(
- ReleaseProtectedTagAccessError.new,
- project_id: project.id,
- user_id: current_user.id)
- end
- end
-
# overridden in EE
def project_group_id; end
end
diff --git a/app/services/releases/create_service.rb b/app/services/releases/create_service.rb
index 2aac5644b84..caa6a003205 100644
--- a/app/services/releases/create_service.rb
+++ b/app/services/releases/create_service.rb
@@ -7,8 +7,6 @@ module Releases
return error('Release already exists', 409) if release
return error("Milestone(s) not found: #{inexistent_milestones.join(', ')}", 400) if inexistent_milestones.any?
- track_protected_tag_access_error!
-
# should be found before the creation of new tag
# because tag creation can spawn new pipeline
# which won't have any data for evidence yet
@@ -48,8 +46,6 @@ module Releases
end
def can_create_tag?
- return true unless ::Feature.enabled?(:evalute_protected_tag_for_release_permissions, project, default_enabled: :yaml)
-
::Gitlab::UserAccess.new(current_user, container: project).can_create_tag?(tag_name)
end
diff --git a/app/services/releases/destroy_service.rb b/app/services/releases/destroy_service.rb
index 36cf29c955d..8abf9308689 100644
--- a/app/services/releases/destroy_service.rb
+++ b/app/services/releases/destroy_service.rb
@@ -6,8 +6,6 @@ module Releases
return error('Release does not exist', 404) unless release
return error('Access Denied', 403) unless allowed?
- track_protected_tag_access_error!
-
if release.destroy
success(tag: existing_tag, release: release)
else
diff --git a/app/services/releases/update_service.rb b/app/services/releases/update_service.rb
index eda4b7102c0..2e0a2f8488a 100644
--- a/app/services/releases/update_service.rb
+++ b/app/services/releases/update_service.rb
@@ -7,8 +7,6 @@ module Releases
return error
end
- track_protected_tag_access_error!
-
if param_for_milestone_titles_provided?
previous_milestones = release.milestones.map(&:title)
params[:milestones] = milestones
@@ -18,7 +16,7 @@ module Releases
# when it does assign_attributes instead of actual saving
# this leads to the validation error being raised
# see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43385
- ActiveRecord::Base.transaction do
+ ApplicationRecord.transaction do
if release.update(params)
execute_hooks(release, 'update')
success(tag: existing_tag, release: release, milestones_updated: milestones_updated?(previous_milestones))
diff --git a/app/services/resource_events/change_labels_service.rb b/app/services/resource_events/change_labels_service.rb
index 3797d41a5df..bc2d3a946cc 100644
--- a/app/services/resource_events/change_labels_service.rb
+++ b/app/services/resource_events/change_labels_service.rb
@@ -23,7 +23,7 @@ module ResourceEvents
label_hash.merge(label_id: label.id, action: ResourceLabelEvent.actions['remove'])
end
- Gitlab::Database.bulk_insert(ResourceLabelEvent.table_name, labels) # rubocop:disable Gitlab/BulkInsert
+ Gitlab::Database.main.bulk_insert(ResourceLabelEvent.table_name, labels) # rubocop:disable Gitlab/BulkInsert
resource.expire_note_etag_cache
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_label_changed_action(author: user) if resource.is_a?(Issue)
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index 661aafc70cd..fe11820fb54 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -22,8 +22,9 @@ module Search
filters: { state: params[:state], confidential: params[:confidential] })
end
+ # rubocop: disable CodeReuse/ActiveRecord
def projects
- @projects ||= ProjectsFinder.new(params: { non_archived: true }, current_user: current_user).execute
+ @projects ||= ProjectsFinder.new(params: { non_archived: true }, current_user: current_user).execute.includes(:topics, :taggings)
end
def allowed_scopes
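For context, a hedged sketch of the N+1 the added .includes avoids (illustrative only; assumes callers iterate the results and render their topics):

# Without .includes(:topics, :taggings), each project below triggers extra queries;
# with it, both associations are preloaded in a fixed number of queries.
projects.each { |project| project.topics.map(&:name) }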
diff --git a/app/services/security/merge_reports_service.rb b/app/services/security/merge_reports_service.rb
new file mode 100644
index 00000000000..5f6f98a3c39
--- /dev/null
+++ b/app/services/security/merge_reports_service.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module Security
+ class MergeReportsService
+ attr_reader :source_reports
+
+ def initialize(*source_reports)
+ @source_reports = source_reports
+ end
+
+ def execute
+ copy_resources_to_target_report
+ copy_findings_to_target
+ target_report
+ end
+
+ private
+
+ def target_report
+ @target_report ||= ::Gitlab::Ci::Reports::Security::Report.new(
+ source_reports.first.type,
+ source_reports.first.pipeline,
+ source_reports.first.created_at
+ ).tap { |report| report.errors = source_reports.flat_map(&:errors) }
+ end
+
+ def copy_resources_to_target_report
+ sorted_source_reports.each do |source_report|
+ copy_scanners_to_target(source_report)
+ copy_identifiers_to_target(source_report)
+ copy_scanned_resources_to_target(source_report)
+ end
+ end
+
+ def sorted_source_reports
+ source_reports.sort { |a, b| a.primary_scanner_order_to(b) }
+ end
+
+ def copy_scanners_to_target(source_report)
+ # no need for de-duping: it's done by Report internally
+ source_report.scanners.values.each { |scanner| target_report.add_scanner(scanner) }
+ end
+
+ def copy_identifiers_to_target(source_report)
+ # no need for de-duping: it's done by Report internally
+ source_report.identifiers.values.each { |identifier| target_report.add_identifier(identifier) }
+ end
+
+ def copy_scanned_resources_to_target(source_report)
+ target_report.scanned_resources.concat(source_report.scanned_resources).uniq!
+ end
+
+ def copy_findings_to_target
+ deduplicated_findings.sort.each { |finding| target_report.add_finding(finding) }
+ end
+
+ def deduplicated_findings
+ prioritized_findings.each_with_object([[], Set.new]) do |finding, (deduplicated, seen_identifiers)|
+ next if seen_identifiers.intersect?(finding.keys.to_set)
+
+ seen_identifiers.merge(finding.keys)
+ deduplicated << finding
+ end.first
+ end
+
+ def prioritized_findings
+ source_reports.flat_map(&:findings).sort { |a, b| a.scanner_order_to(b) }
+ end
+ end
+end
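A minimal sketch of the identifier-based de-duplication in deduplicated_findings above, using plain hashes in place of findings (names and data are illustrative only):

require 'set'

findings = [
  { id: 'A', keys: Set['CVE-2021-1'] },
  { id: 'B', keys: Set['CVE-2021-1', 'CVE-2021-2'] }, # shares an identifier with A, so it is dropped
  { id: 'C', keys: Set['CVE-2021-3'] }
]

kept, _seen = findings.each_with_object([[], Set.new]) do |finding, (deduplicated, seen)|
  next if seen.intersect?(finding[:keys])

  seen.merge(finding[:keys])
  deduplicated << finding
end

kept.map { |f| f[:id] } # => ["A", "C"]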
diff --git a/app/services/service_ping/permit_data_categories_service.rb b/app/services/service_ping/permit_data_categories_service.rb
index ff48c022b56..d8fa255a485 100644
--- a/app/services/service_ping/permit_data_categories_service.rb
+++ b/app/services/service_ping/permit_data_categories_service.rb
@@ -2,10 +2,10 @@
module ServicePing
class PermitDataCategoriesService
- STANDARD_CATEGORY = 'Standard'
- SUBSCRIPTION_CATEGORY = 'Subscription'
- OPERATIONAL_CATEGORY = 'Operational'
- OPTIONAL_CATEGORY = 'Optional'
+ STANDARD_CATEGORY = 'standard'
+ SUBSCRIPTION_CATEGORY = 'subscription'
+ OPERATIONAL_CATEGORY = 'operational'
+ OPTIONAL_CATEGORY = 'optional'
CATEGORIES = [
STANDARD_CATEGORY,
SUBSCRIPTION_CATEGORY,
@@ -14,20 +14,10 @@ module ServicePing
].to_set.freeze
def execute
- return [] unless product_intelligence_enabled?
+ return [] unless ServicePingSettings.product_intelligence_enabled?
CATEGORIES
end
-
- def product_intelligence_enabled?
- pings_enabled? && !User.single_user&.requires_usage_stats_consent?
- end
-
- private
-
- def pings_enabled?
- ::Gitlab::CurrentSettings.usage_ping_enabled?
- end
end
end
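A hedged illustration of the resulting behaviour (return values follow directly from the hunk above; CATEGORIES is the frozen set defined a few lines up):

ServicePing::PermitDataCategoriesService.new.execute
# => the frozen set of 'standard', 'subscription', 'operational', 'optional'
#    when ServicePingSettings.product_intelligence_enabled? is true
# => [] otherwise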
diff --git a/app/services/service_ping/service_ping_settings.rb b/app/services/service_ping/service_ping_settings.rb
new file mode 100644
index 00000000000..6964210b1db
--- /dev/null
+++ b/app/services/service_ping/service_ping_settings.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ServicePing
+ module ServicePingSettings
+ extend self
+
+ def product_intelligence_enabled?
+ enabled? && !User.single_user&.requires_usage_stats_consent?
+ end
+
+ def enabled?
+ ::Gitlab::CurrentSettings.usage_ping_enabled?
+ end
+ end
+end
+
+ServicePing::ServicePingSettings.extend_mod_with('ServicePing::ServicePingSettings')
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index 5c03aa46e18..09d1670fd1f 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -18,7 +18,7 @@ module ServicePing
SubmissionError = Class.new(StandardError)
def execute
- return unless ServicePing::PermitDataCategoriesService.new.product_intelligence_enabled?
+ return unless ServicePing::ServicePingSettings.product_intelligence_enabled?
begin
usage_data = BuildPayloadService.new.execute
diff --git a/app/services/spam/mark_as_spam_service.rb b/app/services/spam/akismet_mark_as_spam_service.rb
index ed5e674d8e9..da5506b9a21 100644
--- a/app/services/spam/mark_as_spam_service.rb
+++ b/app/services/spam/akismet_mark_as_spam_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Spam
- class MarkAsSpamService
+ class AkismetMarkAsSpamService
include ::AkismetMethods
attr_accessor :target, :options
@@ -9,12 +9,12 @@ module Spam
def initialize(target:)
@target = target
@options = {}
+ end
+ def execute
@options[:ip_address] = @target.ip_address
@options[:user_agent] = @target.user_agent
- end
- def execute
return unless target.submittable_as_spam?
return unless akismet.submit_spam
diff --git a/app/services/spam/spam_action_service.rb b/app/services/spam/spam_action_service.rb
index ec16ce19cf6..2a28b66f09b 100644
--- a/app/services/spam/spam_action_service.rb
+++ b/app/services/spam/spam_action_service.rb
@@ -28,7 +28,7 @@ module Spam
ServiceResponse.success(message: "CAPTCHA successfully verified")
else
return ServiceResponse.success(message: 'Skipped spam check because user was allowlisted') if allowlisted?(user)
- return ServiceResponse.success(message: 'Skipped spam check because it was not required') unless check_for_spam?
+ return ServiceResponse.success(message: 'Skipped spam check because it was not required') unless check_for_spam?(user: user)
perform_spam_service_check
ServiceResponse.success(message: "Spam check performed. Check #{target.class.name} spammable model for any errors or CAPTCHA requirement")
@@ -94,7 +94,7 @@ module Spam
def create_spam_log
@spam_log = SpamLog.create!(
{
- user_id: target.author_id,
+ user_id: user.id,
title: target.spam_title,
description: target.spam_description,
source_ip: spam_params.ip_address,
diff --git a/app/services/suggestions/create_service.rb b/app/services/suggestions/create_service.rb
index a97c36fa0ca..c5f9fa1eee0 100644
--- a/app/services/suggestions/create_service.rb
+++ b/app/services/suggestions/create_service.rb
@@ -25,7 +25,7 @@ module Suggestions
end
rows.in_groups_of(100, false) do |rows|
- Gitlab::Database.bulk_insert('suggestions', rows) # rubocop:disable Gitlab/BulkInsert
+ Gitlab::Database.main.bulk_insert('suggestions', rows) # rubocop:disable Gitlab/BulkInsert
end
Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter.track_add_suggestion_action(user: @note.author)
diff --git a/app/services/todos/destroy/destroyed_issuable_service.rb b/app/services/todos/destroy/destroyed_issuable_service.rb
index db12965224b..7a85b59eeea 100644
--- a/app/services/todos/destroy/destroyed_issuable_service.rb
+++ b/app/services/todos/destroy/destroyed_issuable_service.rb
@@ -20,7 +20,7 @@ module Todos
SQL
loop do
- result = ActiveRecord::Base.connection.execute(delete_query)
+ result = Todo.connection.execute(delete_query)
break if result.cmd_tuples == 0
diff --git a/app/services/todos/destroy/entity_leave_service.rb b/app/services/todos/destroy/entity_leave_service.rb
index dfe14225ade..1fe397d24e7 100644
--- a/app/services/todos/destroy/entity_leave_service.rb
+++ b/app/services/todos/destroy/entity_leave_service.rb
@@ -23,10 +23,10 @@ module Todos
return if user_has_reporter_access?
remove_confidential_resource_todos
+ remove_group_todos
if entity.private?
remove_project_todos
- remove_group_todos
else
enqueue_private_features_worker
end
@@ -68,7 +68,7 @@ module Todos
return unless entity.is_a?(Namespace)
Todo
- .for_group(non_authorized_non_public_groups)
+ .for_group(unauthorized_private_groups)
.for_user(user)
.delete_all
end
@@ -104,16 +104,13 @@ module Todos
GroupsFinder.new(user, min_access_level: Gitlab::Access::REPORTER).execute.select(:id)
end
- # since the entity is a private group, we can assume all subgroups are also
- # private. We can therefore limit GroupsFinder with `all_available: false`.
- # Otherwise it tries to include all public groups. This generates an expensive
- # SQL queries: https://gitlab.com/gitlab-org/gitlab/-/issues/325133
# rubocop: disable CodeReuse/ActiveRecord
- def non_authorized_non_public_groups
+ def unauthorized_private_groups
return [] unless entity.is_a?(Namespace)
- return [] unless entity.private?
- entity.self_and_descendants.select(:id)
+ groups = entity.self_and_descendants.private_only
+
+ groups.select(:id)
.id_not_in(GroupsFinder.new(user, all_available: false).execute.select(:id).reorder(nil))
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/users/ban_service.rb b/app/services/users/ban_service.rb
index 247ed14966b..88e92ebff9b 100644
--- a/app/services/users/ban_service.rb
+++ b/app/services/users/ban_service.rb
@@ -1,25 +1,15 @@
# frozen_string_literal: true
module Users
- class BanService < BaseService
- def initialize(current_user)
- @current_user = current_user
- end
+ class BanService < BannedUserBaseService
+ private
- def execute(user)
- if user.ban
- log_event(user)
- success
- else
- messages = user.errors.full_messages
- error(messages.uniq.join('. '))
- end
+ def update_user(user)
+ user.ban
end
- private
-
- def log_event(user)
- Gitlab::AppLogger.info(message: "User banned", user: "#{user.username}", email: "#{user.email}", banned_by: "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
+ def action
+ :ban
end
end
end
diff --git a/app/services/users/banned_user_base_service.rb b/app/services/users/banned_user_base_service.rb
new file mode 100644
index 00000000000..16041075941
--- /dev/null
+++ b/app/services/users/banned_user_base_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Users
+ class BannedUserBaseService < BaseService
+ def initialize(current_user)
+ @current_user = current_user
+ end
+
+ def execute(user)
+ return permission_error unless allowed?
+
+ if update_user(user)
+ log_event(user)
+ success
+ else
+ messages = user.errors.full_messages
+ error(messages.uniq.join('. '))
+ end
+ end
+
+ private
+
+ attr_reader :current_user
+
+ def allowed?
+ can?(current_user, :admin_all_resources)
+ end
+
+ def permission_error
+ error(_("You are not allowed to %{action} a user") % { action: action.to_s }, :forbidden)
+ end
+
+ def log_event(user)
+ Gitlab::AppLogger.info(message: "User #{action}", user: "#{user.username}", email: "#{user.email}", "#{action}_by": "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
+ end
+ end
+end
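A minimal usage sketch of the template-method split introduced here (subclass names come from the adjacent hunks; return shapes follow BaseService#success and #error, so treat them as illustrative):

Users::BanService.new(admin).execute(user)    # calls user.ban and logs "User ban"
Users::UnbanService.new(admin).execute(user)  # calls user.activate and logs "User unban"
# A caller without :admin_all_resources gets the :forbidden response from permission_error.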
diff --git a/app/services/users/set_status_service.rb b/app/services/users/set_status_service.rb
index 2b4be8c833b..d0bb40cbcfb 100644
--- a/app/services/users/set_status_service.rb
+++ b/app/services/users/set_status_service.rb
@@ -28,11 +28,12 @@ module Users
params[:emoji] = UserStatus::DEFAULT_EMOJI if params[:emoji].blank?
params[:availability] = UserStatus.availabilities[:not_set] unless new_user_availability
- user_status.update(params)
+ bump_user if user_status.update(params)
end
def remove_status
- UserStatus.delete(target_user.id)
+ bump_user if UserStatus.delete(target_user.id).nonzero?
+ true
end
def user_status
@@ -48,5 +49,12 @@ module Users
def new_user_availability
UserStatus.availabilities[params[:availability]]
end
+
+ def bump_user
+ # Intentionally not calling `touch` as that will trigger other callbacks
+ # on target_user (e.g. after_touch, after_commit, after_rollback) and we
+ # don't need them to happen here.
+ target_user.update_column(:updated_at, Time.current)
+ end
end
end
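A hedged note on the callback-free bump above (standard ActiveRecord behaviour rather than anything specific to this diff):

target_user.touch                                     # saves updated_at but runs touch/commit callbacks
target_user.update_column(:updated_at, Time.current)  # issues a single UPDATE, skipping validations and callbacks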
diff --git a/app/services/users/unban_service.rb b/app/services/users/unban_service.rb
new file mode 100644
index 00000000000..363783cf240
--- /dev/null
+++ b/app/services/users/unban_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Users
+ class UnbanService < BannedUserBaseService
+ private
+
+ def update_user(user)
+ user.activate
+ end
+
+ def action
+ :unban
+ end
+ end
+end