gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-05-17 19:05:49 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-05-17 19:05:49 +0300
commit    43a25d93ebdabea52f99b05e15b06250cd8f07d7 (patch)
tree      dceebdc68925362117480a5d672bcff122fb625b /app/services
parent    20c84b99005abd1c82101dfeff264ac50d2df211 (diff)
Add latest changes from gitlab-org/gitlab@16-0-stable-ee (tag: v16.0.0-rc42)
Diffstat (limited to 'app/services')
-rw-r--r-- app/services/achievements/award_service.rb | 49
-rw-r--r-- app/services/achievements/destroy_service.rb | 33
-rw-r--r-- app/services/achievements/revoke_service.rb | 47
-rw-r--r-- app/services/achievements/update_service.rb | 41
-rw-r--r-- app/services/admin/abuse_report_update_service.rb | 89
-rw-r--r-- app/services/auth/container_registry_authentication_service.rb | 36
-rw-r--r-- app/services/authorized_project_update/project_recalculate_service.rb | 2
-rw-r--r-- app/services/base_container_service.rb | 22
-rw-r--r-- app/services/branches/validate_new_service.rb | 2
-rw-r--r-- app/services/bulk_imports/archive_extraction_service.rb | 11
-rw-r--r-- app/services/bulk_imports/batched_relation_export_service.rb | 91
-rw-r--r-- app/services/bulk_imports/create_service.rb | 89
-rw-r--r-- app/services/bulk_imports/export_service.rb | 19
-rw-r--r-- app/services/bulk_imports/file_export_service.rb | 49
-rw-r--r-- app/services/bulk_imports/lfs_objects_export_service.rb | 14
-rw-r--r-- app/services/bulk_imports/relation_batch_export_service.rb | 80
-rw-r--r-- app/services/bulk_imports/relation_export_service.rb | 54
-rw-r--r-- app/services/bulk_imports/repository_bundle_export_service.rb | 2
-rw-r--r-- app/services/bulk_imports/tree_export_service.rb | 43
-rw-r--r-- app/services/bulk_imports/uploads_export_service.rb | 14
-rw-r--r-- app/services/ci/archive_trace_service.rb | 39
-rw-r--r-- app/services/ci/catalog/validate_resource_service.rb | 46
-rw-r--r-- app/services/ci/create_pipeline_service.rb | 4
-rw-r--r-- app/services/ci/ensure_stage_service.rb | 10
-rw-r--r-- app/services/ci/generate_kubeconfig_service.rb | 2
-rw-r--r-- app/services/ci/job_artifacts/bulk_delete_by_project_service.rb | 73
-rw-r--r-- app/services/ci/job_artifacts/create_service.rb | 107
-rw-r--r-- app/services/ci/job_artifacts/destroy_all_expired_service.rb | 22
-rw-r--r-- app/services/ci/job_artifacts/destroy_batch_service.rb | 21
-rw-r--r-- app/services/ci/job_token_scope/add_project_service.rb | 6
-rw-r--r-- app/services/ci/job_token_scope/remove_project_service.rb | 2
-rw-r--r-- app/services/ci/list_config_variables_service.rb | 9
-rw-r--r-- app/services/ci/parse_dotenv_artifact_service.rb | 9
-rw-r--r-- app/services/ci/pipeline_processing/atomic_processing_service.rb | 60
-rw-r--r-- app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb | 130
-rw-r--r-- app/services/ci/pipeline_schedules/take_ownership_service.rb | 2
-rw-r--r-- app/services/ci/pipelines/add_job_service.rb | 6
-rw-r--r-- app/services/ci/process_build_service.rb | 34
-rw-r--r-- app/services/ci/queue/build_queue_service.rb | 4
-rw-r--r-- app/services/ci/queue/pending_builds_strategy.rb | 7
-rw-r--r-- app/services/ci/register_job_service.rb | 13
-rw-r--r-- app/services/ci/reset_skipped_jobs_service.rb | 32
-rw-r--r-- app/services/ci/runners/create_runner_service.rb | 27
-rw-r--r-- app/services/ci/runners/process_runner_version_update_service.rb | 5
-rw-r--r-- app/services/ci/runners/register_runner_service.rb | 61
-rw-r--r-- app/services/ci/runners/runner_creation_strategies/group_runner_strategy.rb | 38
-rw-r--r-- app/services/ci/runners/runner_creation_strategies/instance_runner_strategy.rb | 15
-rw-r--r-- app/services/ci/runners/runner_creation_strategies/project_runner_strategy.rb | 38
-rw-r--r-- app/services/ci/runners/stale_managers_cleanup_service.rb (renamed from app/services/ci/runners/stale_machines_cleanup_service.rb) | 8
-rw-r--r-- app/services/ci/runners/unregister_runner_manager_service.rb | 33
-rw-r--r-- app/services/ci/runners/unregister_runner_service.rb | 3
-rw-r--r-- app/services/ci/stuck_builds/drop_helpers.rb | 29
-rw-r--r-- app/services/ci/track_failed_build_service.rb | 2
-rw-r--r-- app/services/ci/update_build_queue_service.rb | 2
-rw-r--r-- app/services/clusters/agent_tokens/create_service.rb | 20
-rw-r--r-- app/services/clusters/agent_tokens/revoke_service.rb | 46
-rw-r--r-- app/services/clusters/agents/authorizations/ci_access/filter_service.rb | 54
-rw-r--r-- app/services/clusters/agents/authorizations/ci_access/refresh_service.rb | 106
-rw-r--r-- app/services/clusters/agents/authorizations/user_access/refresh_service.rb | 108
-rw-r--r-- app/services/clusters/agents/authorize_proxy_user_service.rb | 74
-rw-r--r-- app/services/clusters/agents/create_activity_event_service.rb | 4
-rw-r--r-- app/services/clusters/agents/filter_authorizations_service.rb | 50
-rw-r--r-- app/services/clusters/agents/refresh_authorization_service.rb | 102
-rw-r--r-- app/services/clusters/applications/base_helm_service.rb | 69
-rw-r--r-- app/services/commits/change_service.rb | 20
-rw-r--r-- app/services/concerns/exclusive_lease_guard.rb | 22
-rw-r--r-- app/services/concerns/incident_management/usage_data.rb | 4
-rw-r--r-- app/services/concerns/issues/resolve_discussions.rb | 6
-rw-r--r-- app/services/concerns/update_repository_storage_methods.rb | 9
-rw-r--r-- app/services/concerns/work_items/widgetable_service.rb | 30
-rw-r--r-- app/services/container_expiration_policies/cleanup_service.rb | 1
-rw-r--r-- app/services/dependency_proxy/head_manifest_service.rb | 2
-rw-r--r-- app/services/discussions/update_diff_position_service.rb | 2
-rw-r--r-- app/services/draft_notes/publish_service.rb | 3
-rw-r--r-- app/services/environments/stop_service.rb | 16
-rw-r--r-- app/services/error_tracking/list_projects_service.rb | 4
-rw-r--r-- app/services/event_create_service.rb | 49
-rw-r--r-- app/services/feature_flags/base_service.rb | 34
-rw-r--r-- app/services/feature_flags/create_service.rb | 10
-rw-r--r-- app/services/feature_flags/destroy_service.rb | 10
-rw-r--r-- app/services/feature_flags/update_service.rb | 16
-rw-r--r-- app/services/files/base_service.rb | 15
-rw-r--r-- app/services/git/base_hooks_service.rb | 8
-rw-r--r-- app/services/git/branch_hooks_service.rb | 2
-rw-r--r-- app/services/google_cloud/generate_pipeline_service.rb | 2
-rw-r--r-- app/services/groups/autocomplete_service.rb | 2
-rw-r--r-- app/services/groups/group_links/create_service.rb | 2
-rw-r--r-- app/services/groups/group_links/destroy_service.rb | 4
-rw-r--r-- app/services/groups/group_links/update_service.rb | 4
-rw-r--r-- app/services/groups/transfer_service.rb | 9
-rw-r--r-- app/services/import/base_service.rb | 2
-rw-r--r-- app/services/import/bitbucket_server_service.rb | 2
-rw-r--r-- app/services/import/fogbugz_service.rb | 4
-rw-r--r-- app/services/import/github/cancel_project_import_service.rb | 6
-rw-r--r-- app/services/import/github_service.rb | 2
-rw-r--r-- app/services/import/validate_remote_git_endpoint_service.rb | 4
-rw-r--r-- app/services/import_csv/base_service.rb | 40
-rw-r--r-- app/services/incident_management/timeline_events/base_service.rb | 2
-rw-r--r-- app/services/integrations/slack_event_service.rb | 61
-rw-r--r-- app/services/integrations/slack_events/app_home_opened_service.rb | 92
-rw-r--r-- app/services/integrations/slack_events/url_verification_service.rb | 26
-rw-r--r-- app/services/integrations/slack_interaction_service.rb | 36
-rw-r--r-- app/services/integrations/slack_interactions/block_action_service.rb | 32
-rw-r--r-- app/services/integrations/slack_interactions/incident_management/incident_modal_closed_service.rb | 58
-rw-r--r-- app/services/integrations/slack_interactions/incident_management/incident_modal_opened_service.rb | 105
-rw-r--r-- app/services/integrations/slack_interactions/incident_management/incident_modal_submit_service.rb | 162
-rw-r--r-- app/services/integrations/slack_interactions/slack_block_actions/incident_management/project_update_handler.rb | 131
-rw-r--r-- app/services/integrations/slack_option_service.rb | 42
-rw-r--r-- app/services/integrations/slack_options/label_search_handler.rb | 61
-rw-r--r-- app/services/integrations/slack_options/user_search_handler.rb | 55
-rw-r--r-- app/services/issuable/callbacks/base.rb | 31
-rw-r--r-- app/services/issuable/callbacks/milestone.rb | 81
-rw-r--r-- app/services/issuable/clone/base_service.rb | 5
-rw-r--r-- app/services/issuable/destroy_service.rb | 2
-rw-r--r-- app/services/issuable/import_csv/base_service.rb | 2
-rw-r--r-- app/services/issuable_base_service.rb | 79
-rw-r--r-- app/services/issuable_links/create_service.rb | 17
-rw-r--r-- app/services/issue_links/create_service.rb | 4
-rw-r--r-- app/services/issues/after_create_service.rb | 6
-rw-r--r-- app/services/issues/base_service.rb | 37
-rw-r--r-- app/services/issues/build_service.rb | 63
-rw-r--r-- app/services/issues/close_service.rb | 14
-rw-r--r-- app/services/issues/create_service.rb | 46
-rw-r--r-- app/services/issues/duplicate_service.rb | 5
-rw-r--r-- app/services/issues/export_csv_service.rb | 2
-rw-r--r-- app/services/issues/referenced_merge_requests_service.rb | 12
-rw-r--r-- app/services/issues/related_branches_service.rb | 5
-rw-r--r-- app/services/issues/reopen_service.rb | 17
-rw-r--r-- app/services/issues/reorder_service.rb | 5
-rw-r--r-- app/services/issues/update_service.rb | 55
-rw-r--r-- app/services/issues/zoom_link_service.rb | 2
-rw-r--r-- app/services/jira_connect/sync_service.rb | 4
-rw-r--r-- app/services/jira_connect_installations/proxy_lifecycle_event_service.rb | 6
-rw-r--r-- app/services/keys/last_used_service.rb | 24
-rw-r--r-- app/services/keys/revoke_service.rb | 2
-rw-r--r-- app/services/markup/rendering_service.rb | 2
-rw-r--r-- app/services/mattermost/create_team_service.rb | 2
-rw-r--r-- app/services/members/approve_access_request_service.rb | 2
-rw-r--r-- app/services/members/base_service.rb | 4
-rw-r--r-- app/services/members/creator_service.rb | 96
-rw-r--r-- app/services/members/destroy_service.rb | 59
-rw-r--r-- app/services/merge_requests/add_context_service.rb | 4
-rw-r--r-- app/services/merge_requests/after_create_service.rb | 3
-rw-r--r-- app/services/merge_requests/base_service.rb | 20
-rw-r--r-- app/services/merge_requests/build_service.rb | 24
-rw-r--r-- app/services/merge_requests/create_service.rb | 5
-rw-r--r-- app/services/merge_requests/ff_merge_service.rb | 10
-rw-r--r-- app/services/merge_requests/handle_assignees_change_service.rb | 1
-rw-r--r-- app/services/merge_requests/merge_service.rb | 7
-rw-r--r-- app/services/merge_requests/merge_to_ref_service.rb | 4
-rw-r--r-- app/services/merge_requests/mergeability/detailed_merge_status_service.rb | 7
-rw-r--r-- app/services/merge_requests/push_options_handler_service.rb | 9
-rw-r--r-- app/services/merge_requests/rebase_service.rb | 2
-rw-r--r-- app/services/merge_requests/refresh_service.rb | 15
-rw-r--r-- app/services/merge_requests/reload_diffs_service.rb | 8
-rw-r--r-- app/services/merge_requests/retarget_chain_service.rb | 15
-rw-r--r-- app/services/merge_requests/update_service.rb | 22
-rw-r--r-- app/services/metrics/dashboard/annotations/create_service.rb | 4
-rw-r--r-- app/services/metrics/dashboard/annotations/delete_service.rb | 2
-rw-r--r-- app/services/metrics/dashboard/clone_dashboard_service.rb | 4
-rw-r--r-- app/services/metrics/dashboard/self_monitoring_dashboard_service.rb | 48
-rw-r--r-- app/services/metrics/global_metrics_update_service.rb | 24
-rw-r--r-- app/services/metrics_service.rb | 6
-rw-r--r-- app/services/ml/experiment_tracking/candidate_repository.rb | 19
-rw-r--r-- app/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service.rb | 30
-rw-r--r-- app/services/notes/build_service.rb | 7
-rw-r--r-- app/services/notes/create_service.rb | 54
-rw-r--r-- app/services/notes/destroy_service.rb | 6
-rw-r--r-- app/services/notes/quick_actions_service.rb | 58
-rw-r--r-- app/services/notes/update_service.rb | 6
-rw-r--r-- app/services/notification_service.rb | 40
-rw-r--r-- app/services/packages/conan/search_service.rb | 37
-rw-r--r-- app/services/packages/conan/single_package_search_service.rb | 50
-rw-r--r-- app/services/packages/create_event_service.rb | 9
-rw-r--r-- app/services/packages/debian/extract_metadata_service.rb | 34
-rw-r--r-- app/services/packages/debian/find_or_create_incoming_service.rb | 2
-rw-r--r-- app/services/packages/debian/find_or_create_package_service.rb | 31
-rw-r--r-- app/services/packages/debian/generate_distribution_service.rb | 36
-rw-r--r-- app/services/packages/debian/process_package_file_service.rb | 29
-rw-r--r-- app/services/packages/generic/create_package_file_service.rb | 6
-rw-r--r-- app/services/packages/mark_package_for_destruction_service.rb | 11
-rw-r--r-- app/services/packages/mark_packages_for_destruction_service.rb | 11
-rw-r--r-- app/services/packages/maven/find_or_create_package_service.rb | 63
-rw-r--r-- app/services/packages/npm/create_metadata_cache_service.rb | 53
-rw-r--r-- app/services/packages/npm/create_package_service.rb | 64
-rw-r--r-- app/services/packages/npm/deprecate_package_service.rb | 78
-rw-r--r-- app/services/packages/npm/generate_metadata_service.rb | 111
-rw-r--r-- app/services/personal_access_tokens/create_service.rb | 8
-rw-r--r-- app/services/personal_access_tokens/rotate_service.rb | 49
-rw-r--r-- app/services/preview_markdown_service.rb | 2
-rw-r--r-- app/services/projects/android_target_platform_detector_service.rb | 35
-rw-r--r-- app/services/projects/batch_open_merge_requests_count_service.rb | 18
-rw-r--r-- app/services/projects/blame_service.rb | 69
-rw-r--r-- app/services/projects/container_repository/gitlab/cleanup_tags_service.rb | 4
-rw-r--r-- app/services/projects/create_service.rb | 39
-rw-r--r-- app/services/projects/fork_service.rb | 6
-rw-r--r-- app/services/projects/forks/sync_service.rb | 113
-rw-r--r-- app/services/projects/hashed_storage/base_repository_service.rb | 2
-rw-r--r-- app/services/projects/import_export/relation_export_service.rb | 1
-rw-r--r-- app/services/projects/import_service.rb | 7
-rw-r--r-- app/services/projects/lfs_pointers/lfs_download_link_list_service.rb | 14
-rw-r--r-- app/services/projects/lfs_pointers/lfs_link_service.rb | 12
-rw-r--r-- app/services/projects/open_issues_count_service.rb | 2
-rw-r--r-- app/services/projects/open_merge_requests_count_service.rb | 8
-rw-r--r-- app/services/projects/operations/update_service.rb | 2
-rw-r--r-- app/services/projects/overwrite_project_service.rb | 20
-rw-r--r-- app/services/projects/protect_default_branch_service.rb | 6
-rw-r--r-- app/services/projects/transfer_service.rb | 4
-rw-r--r-- app/services/projects/update_pages_service.rb | 3
-rw-r--r-- app/services/projects/update_remote_mirror_service.rb | 14
-rw-r--r-- app/services/projects/update_repository_storage_service.rb | 13
-rw-r--r-- app/services/projects/update_service.rb | 22
-rw-r--r-- app/services/protected_branches/base_service.rb | 4
-rw-r--r-- app/services/protected_branches/cache_service.rb | 9
-rw-r--r-- app/services/quick_actions/interpret_service.rb | 10
-rw-r--r-- app/services/releases/create_service.rb | 6
-rw-r--r-- app/services/releases/links/base_service.rb | 35
-rw-r--r-- app/services/releases/links/create_service.rb | 25
-rw-r--r-- app/services/releases/links/destroy_service.rb | 24
-rw-r--r-- app/services/releases/links/update_service.rb | 24
-rw-r--r-- app/services/resource_access_tokens/create_service.rb | 44
-rw-r--r-- app/services/resource_events/change_labels_service.rb | 2
-rw-r--r-- app/services/security/ci_configuration/base_create_service.rb | 19
-rw-r--r-- app/services/serverless/associate_domain_service.rb | 30
-rw-r--r-- app/services/spam/spam_action_service.rb | 2
-rw-r--r-- app/services/spam/spam_verdict_service.rb | 52
-rw-r--r-- app/services/system_note_service.rb | 4
-rw-r--r-- app/services/system_notes/commit_service.rb | 56
-rw-r--r-- app/services/system_notes/issuables_service.rb | 6
-rw-r--r-- app/services/system_notes/time_tracking_service.rb | 2
-rw-r--r-- app/services/tasks_to_be_done/base_service.rb | 8
-rw-r--r-- app/services/terraform/remote_state_handler.rb | 8
-rw-r--r-- app/services/todo_service.rb | 34
-rw-r--r-- app/services/users/activity_service.rb | 2
-rw-r--r-- app/services/users/approve_service.rb | 5
-rw-r--r-- app/services/users/ban_service.rb | 2
-rw-r--r-- app/services/users/build_service.rb | 8
-rw-r--r-- app/services/users/deactivate_service.rb | 65
-rw-r--r-- app/services/users/email_verification/base_service.rb | 7
-rw-r--r-- app/services/users/email_verification/validate_token_service.rb | 3
-rw-r--r-- app/services/users/unban_service.rb | 2
-rw-r--r-- app/services/users/unblock_service.rb | 2
-rw-r--r-- app/services/users/update_service.rb | 29
-rw-r--r-- app/services/users/upsert_credit_card_validation_service.rb | 5
-rw-r--r-- app/services/users/validate_manual_otp_service.rb | 3
-rw-r--r-- app/services/work_items/create_service.rb | 10
-rw-r--r-- app/services/work_items/export_csv_service.rb | 18
-rw-r--r-- app/services/work_items/import_csv_service.rb | 116
-rw-r--r-- app/services/work_items/parent_links/base_service.rb | 53
-rw-r--r-- app/services/work_items/parent_links/create_service.rb | 53
-rw-r--r-- app/services/work_items/parent_links/destroy_service.rb | 10
-rw-r--r-- app/services/work_items/parent_links/reorder_service.rb | 39
-rw-r--r-- app/services/work_items/prepare_import_csv_service.rb | 19
-rw-r--r-- app/services/work_items/update_service.rb | 10
-rw-r--r-- app/services/work_items/widgets/assignees_service/update_service.rb | 2
-rw-r--r-- app/services/work_items/widgets/award_emoji_service/update_service.rb | 33
-rw-r--r-- app/services/work_items/widgets/base_service.rb | 12
-rw-r--r-- app/services/work_items/widgets/current_user_todos_service/update_service.rb | 37
-rw-r--r-- app/services/work_items/widgets/description_service/update_service.rb | 2
-rw-r--r-- app/services/work_items/widgets/hierarchy_service/base_service.rb | 4
-rw-r--r-- app/services/work_items/widgets/hierarchy_service/update_service.rb | 60
-rw-r--r-- app/services/work_items/widgets/labels_service/update_service.rb | 5
-rw-r--r-- app/services/work_items/widgets/milestone_service/base_service.rb | 39
-rw-r--r-- app/services/work_items/widgets/milestone_service/create_service.rb | 13
-rw-r--r-- app/services/work_items/widgets/milestone_service/update_service.rb | 13
-rw-r--r-- app/services/work_items/widgets/notifications_service/update_service.rb | 26
-rw-r--r-- app/services/work_items/widgets/start_and_due_date_service/update_service.rb | 2
267 files changed, 5127 insertions(+), 1767 deletions(-)
diff --git a/app/services/achievements/award_service.rb b/app/services/achievements/award_service.rb
new file mode 100644
index 00000000000..3cefb0442d5
--- /dev/null
+++ b/app/services/achievements/award_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Achievements
+ class AwardService
+ attr_reader :current_user, :achievement_id, :recipient_id
+
+ def initialize(current_user, achievement_id, recipient_id)
+ @current_user = current_user
+ @achievement_id = achievement_id
+ @recipient_id = recipient_id
+ end
+
+ def execute
+ achievement = Achievements::Achievement.find(achievement_id)
+ return error_no_permissions unless allowed?(achievement)
+
+ recipient = User.find(recipient_id)
+
+ user_achievement = Achievements::UserAchievement.create(
+ achievement: achievement,
+ user: recipient,
+ awarded_by_user: current_user)
+ return error_awarding(user_achievement) unless user_achievement.persisted?
+
+ NotificationService.new.new_achievement_email(recipient, achievement).deliver_later
+ ServiceResponse.success(payload: user_achievement)
+ rescue ActiveRecord::RecordNotFound => e
+ error(e.message)
+ end
+
+ private
+
+ def allowed?(achievement)
+ current_user&.can?(:award_achievement, achievement)
+ end
+
+ def error_no_permissions
+ error('You have insufficient permissions to award this achievement')
+ end
+
+ def error_awarding(user_achievement)
+ error(user_achievement&.errors&.full_messages || 'Failed to award achievement')
+ end
+
+ def error(message)
+ ServiceResponse.error(message: Array(message))
+ end
+ end
+end
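
For context, a minimal sketch of how the new service might be invoked, for example from a mutation or controller (the current_user, achievement and recipient objects here are assumed, not part of the commit):

  response = Achievements::AwardService.new(current_user, achievement.id, recipient.id).execute
  if response.success?
    response.payload  # the newly created Achievements::UserAchievement
  else
    response.message  # array of error strings (permission failure, lookup failure, or validation errors)
  end
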
diff --git a/app/services/achievements/destroy_service.rb b/app/services/achievements/destroy_service.rb
new file mode 100644
index 00000000000..3204adb8e89
--- /dev/null
+++ b/app/services/achievements/destroy_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Achievements
+ class DestroyService
+ attr_reader :current_user, :achievement
+
+ def initialize(current_user, achievement)
+ @current_user = current_user
+ @achievement = achievement
+ end
+
+ def execute
+ return error_no_permissions unless allowed?
+
+ achievement.delete
+ ServiceResponse.success(payload: achievement)
+ end
+
+ private
+
+ def allowed?
+ current_user&.can?(:admin_achievement, achievement)
+ end
+
+ def error_no_permissions
+ error('You have insufficient permissions to delete this achievement')
+ end
+
+ def error(message)
+ ServiceResponse.error(message: Array(message))
+ end
+ end
+end
diff --git a/app/services/achievements/revoke_service.rb b/app/services/achievements/revoke_service.rb
new file mode 100644
index 00000000000..4601622f517
--- /dev/null
+++ b/app/services/achievements/revoke_service.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Achievements
+ class RevokeService
+ attr_reader :current_user, :user_achievement
+
+ def initialize(current_user, user_achievement)
+ @current_user = current_user
+ @user_achievement = user_achievement
+ end
+
+ def execute
+ return error_no_permissions unless allowed?(user_achievement.achievement)
+ return error_already_revoked if user_achievement.revoked?
+
+ user_achievement.assign_attributes({
+ revoked_by_user_id: current_user.id,
+ revoked_at: Time.zone.now
+ })
+ return error_awarding unless user_achievement.save
+
+ ServiceResponse.success(payload: user_achievement)
+ end
+
+ private
+
+ def allowed?(achievement)
+ current_user&.can?(:award_achievement, achievement)
+ end
+
+ def error_no_permissions
+ error('You have insufficient permissions to revoke this achievement')
+ end
+
+ def error_already_revoked
+ error('This achievement has already been revoked')
+ end
+
+ def error_awarding
+ error(user_achievement&.errors&.full_messages || 'Failed to revoke achievement')
+ end
+
+ def error(message)
+ ServiceResponse.error(message: Array(message))
+ end
+ end
+end
diff --git a/app/services/achievements/update_service.rb b/app/services/achievements/update_service.rb
new file mode 100644
index 00000000000..dcadae8dc3b
--- /dev/null
+++ b/app/services/achievements/update_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Achievements
+ class UpdateService
+ attr_reader :current_user, :achievement, :params
+
+ def initialize(current_user, achievement, params)
+ @current_user = current_user
+ @achievement = achievement
+ @params = params
+ end
+
+ def execute
+ return error_no_permissions unless allowed?
+
+ if achievement.update(params)
+ ServiceResponse.success(payload: achievement)
+ else
+ error_updating
+ end
+ end
+
+ private
+
+ def allowed?
+ current_user&.can?(:admin_achievement, achievement)
+ end
+
+ def error_no_permissions
+ error('You have insufficient permission to update this achievement')
+ end
+
+ def error(message)
+ ServiceResponse.error(payload: achievement, message: Array(message))
+ end
+
+ def error_updating
+ error(achievement&.errors&.full_messages || 'Failed to update achievement')
+ end
+ end
+end
diff --git a/app/services/admin/abuse_report_update_service.rb b/app/services/admin/abuse_report_update_service.rb
new file mode 100644
index 00000000000..5b2ad27ede4
--- /dev/null
+++ b/app/services/admin/abuse_report_update_service.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module Admin
+ class AbuseReportUpdateService < BaseService
+ attr_reader :abuse_report, :params, :current_user, :action
+
+ def initialize(abuse_report, current_user, params)
+ @abuse_report = abuse_report
+ @current_user = current_user
+ @params = params
+ @action = determine_action
+ end
+
+ def execute
+ return ServiceResponse.error(message: 'Admin is required') unless current_user&.can_admin_all_resources?
+ return ServiceResponse.error(message: 'Action is required') unless action.present?
+
+ result = perform_action
+ if result[:status] == :success
+ close_report_and_record_event
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: result[:message])
+ end
+ end
+
+ private
+
+ def determine_action
+ action = params[:user_action]
+ if action.in?(ResourceEvents::AbuseReportEvent.actions.keys)
+ action.to_sym
+ elsif close_report?
+ :close_report
+ end
+ end
+
+ def perform_action
+ case action
+ when :ban_user then ban_user
+ when :block_user then block_user
+ when :delete_user then delete_user
+ when :close_report then close_report
+ end
+ end
+
+ def ban_user
+ Users::BanService.new(current_user).execute(abuse_report.user)
+ end
+
+ def block_user
+ Users::BlockService.new(current_user).execute(abuse_report.user)
+ end
+
+ def delete_user
+ abuse_report.user.delete_async(deleted_by: current_user)
+ success
+ end
+
+ def close_report
+ abuse_report.closed!
+ success
+ end
+
+ def close_report_and_record_event
+ event = action
+
+ if close_report? && action != :close_report
+ close_report
+ event = "#{action}_and_close_report"
+ end
+
+ record_event(event)
+ end
+
+ def close_report?
+ params[:close].to_s == 'true'
+ end
+
+ def record_event(action)
+ reason = params[:reason]
+ unless reason.in?(ResourceEvents::AbuseReportEvent.reasons.keys)
+ reason = ResourceEvents::AbuseReportEvent.reasons[:other]
+ end
+
+ abuse_report.events.create(action: action, user: current_user, reason: reason, comment: params[:comment])
+ end
+ end
+end
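
A hedged example of calling the new admin service; the reason and comment values are assumed, and per record_event above an unknown reason falls back to :other:

  params = { user_action: 'ban_user', close: 'true', reason: 'spam', comment: 'Repeated spam reports' }
  response = Admin::AbuseReportUpdateService.new(abuse_report, admin_user, params).execute
  response.success?  # => true when the user was banned and the report closed
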
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index 509c2d4d544..3827d199325 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -65,7 +65,12 @@ module Auth
token.expire_time = token_expire_at
token[:access] = names.map do |name|
- { type: type, name: name, actions: actions }
+ {
+ type: type,
+ name: name,
+ actions: actions,
+ meta: access_metadata(path: name)
+ }.compact
end
token.encoded
@@ -75,6 +80,28 @@ module Auth
Time.current + Gitlab::CurrentSettings.container_registry_token_expire_delay.minutes
end
+ def self.access_metadata(project: nil, path: nil)
+ # If the project is not given, try to infer it from the provided path
+ if project.nil?
+ return if path.nil? # If no path is given, return early
+ return if path == 'import' # Ignore the special 'import' path
+
+ # If the path ends with '/*', remove it so we can parse the actual repository path
+ path = path.chomp('/*')
+
+ # Parse the repository project from the path
+ begin
+ project = ContainerRegistry::Path.new(path).repository_project
+ rescue ContainerRegistry::Path::InvalidRegistryPathError
+ # If the path is invalid, gracefully handle the error
+ return
+ end
+ end
+
+ # Return the project path (lowercase) as metadata
+ { project_path: project&.full_path&.downcase }
+ end
+
private
def authorized_token(*accesses)
@@ -138,7 +165,12 @@ module Auth
#
ensure_container_repository!(path, authorized_actions)
- { type: type, name: path.to_s, actions: authorized_actions }
+ {
+ type: type,
+ name: path.to_s,
+ actions: authorized_actions,
+ meta: self.class.access_metadata(project: requested_project)
+ }
end
def actively_importing?(actions, path)
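
Roughly, the new access_metadata helper behaves as follows (the paths are illustrative and assume a project exists at 'my-group/my-project'):

  Auth::ContainerRegistryAuthenticationService.access_metadata(path: 'my-group/my-project/cache/*')
  # => { project_path: "my-group/my-project" } once the '/*' suffix is chomped and the path resolves to its project
  Auth::ContainerRegistryAuthenticationService.access_metadata(path: 'import')
  # => nil, since the special 'import' path is skipped
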
diff --git a/app/services/authorized_project_update/project_recalculate_service.rb b/app/services/authorized_project_update/project_recalculate_service.rb
index 8d60fffd959..cb83dc57478 100644
--- a/app/services/authorized_project_update/project_recalculate_service.rb
+++ b/app/services/authorized_project_update/project_recalculate_service.rb
@@ -82,3 +82,5 @@ module AuthorizedProjectUpdate
end
end
end
+
+AuthorizedProjectUpdate::ProjectRecalculateService.prepend_mod
diff --git a/app/services/base_container_service.rb b/app/services/base_container_service.rb
index 86df0236a7f..f46e8d5ec42 100644
--- a/app/services/base_container_service.rb
+++ b/app/services/base_container_service.rb
@@ -10,13 +10,17 @@
# the top of the original BaseService.
class BaseContainerService
include BaseServiceUtility
+ include ::Gitlab::Utils::StrongMemoize
+ attr_accessor :project, :group
attr_reader :container, :current_user, :params
def initialize(container:, current_user: nil, params: {})
@container = container
@current_user = current_user
@params = params.dup
+
+ handle_container_type(container)
end
def project_container?
@@ -30,4 +34,22 @@ class BaseContainerService
def namespace_container?
container.is_a?(::Namespace)
end
+
+ def project_group
+ project&.group
+ end
+ strong_memoize_attr :project_group
+
+ private
+
+ def handle_container_type(container)
+ case container
+ when Project
+ @project = container
+ when Group
+ @group = container
+ when Namespaces::ProjectNamespace
+ @project = container.project
+ end
+ end
end
diff --git a/app/services/branches/validate_new_service.rb b/app/services/branches/validate_new_service.rb
index e45183d160f..0bee7ffaa66 100644
--- a/app/services/branches/validate_new_service.rb
+++ b/app/services/branches/validate_new_service.rb
@@ -29,3 +29,5 @@ module Branches
end
end
end
+
+Branches::ValidateNewService.prepend_mod
diff --git a/app/services/bulk_imports/archive_extraction_service.rb b/app/services/bulk_imports/archive_extraction_service.rb
index caa40d98a76..fec8fd0e1f5 100644
--- a/app/services/bulk_imports/archive_extraction_service.rb
+++ b/app/services/bulk_imports/archive_extraction_service.rb
@@ -33,7 +33,6 @@ module BulkImports
validate_symlink
extract_archive
- remove_symlinks
tmpdir
end
@@ -60,15 +59,5 @@ module BulkImports
def extract_archive
untar_xf(archive: filepath, dir: tmpdir)
end
-
- def extracted_files
- Dir.glob(File.join(tmpdir, '**', '*'))
- end
-
- def remove_symlinks
- extracted_files.each do |path|
- FileUtils.rm(path) if symlink?(path)
- end
- end
end
end
diff --git a/app/services/bulk_imports/batched_relation_export_service.rb b/app/services/bulk_imports/batched_relation_export_service.rb
new file mode 100644
index 00000000000..778510f2e35
--- /dev/null
+++ b/app/services/bulk_imports/batched_relation_export_service.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class BatchedRelationExportService
+ include Gitlab::Utils::StrongMemoize
+
+ BATCH_SIZE = 1000
+ BATCH_CACHE_KEY = 'bulk_imports/batched_relation_export/%{export_id}/%{batch_id}'
+ CACHE_DURATION = 4.hours
+
+ def self.cache_key(export_id, batch_id)
+ Kernel.format(BATCH_CACHE_KEY, export_id: export_id, batch_id: batch_id)
+ end
+
+ def initialize(user, portable, relation, jid)
+ @user = user
+ @portable = portable
+ @relation = relation
+ @resolved_relation = portable.public_send(relation) # rubocop:disable GitlabSecurity/PublicSend
+ @jid = jid
+ end
+
+ def execute
+ return finish_export! if batches_count == 0
+
+ start_export!
+ export.batches.destroy_all # rubocop: disable Cop/DestroyAll
+ enqueue_batch_exports
+ rescue StandardError => e
+ fail_export!(e)
+ ensure
+ FinishBatchedRelationExportWorker.perform_async(export.id)
+ end
+
+ private
+
+ attr_reader :user, :portable, :relation, :jid, :config, :resolved_relation
+
+ def export
+ @export ||= portable.bulk_import_exports.find_or_create_by!(relation: relation) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
+ def objects_count
+ resolved_relation.count
+ end
+
+ def batches_count
+ objects_count.fdiv(BATCH_SIZE).ceil
+ end
+
+ def start_export!
+ update_export!('start')
+ end
+
+ def finish_export!
+ update_export!('finish')
+ end
+
+ def update_export!(event)
+ export.update!(
+ status_event: event,
+ total_objects_count: objects_count,
+ batched: true,
+ batches_count: batches_count,
+ jid: jid,
+ error: nil
+ )
+ end
+
+ def enqueue_batch_exports
+ resolved_relation.each_batch(of: BATCH_SIZE) do |batch, batch_number|
+ batch_id = find_or_create_batch(batch_number).id
+ ids = batch.pluck(batch.model.primary_key) # rubocop:disable CodeReuse/ActiveRecord
+
+ Gitlab::Cache::Import::Caching.set_add(self.class.cache_key(export.id, batch_id), ids, timeout: CACHE_DURATION)
+
+ RelationBatchExportWorker.perform_async(user.id, batch_id)
+ end
+ end
+
+ def find_or_create_batch(batch_number)
+ export.batches.find_or_create_by!(batch_number: batch_number) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
+ def fail_export!(exception)
+ Gitlab::ErrorTracking.track_exception(exception, portable_id: portable.id, portable_type: portable.class.name)
+
+ export.update!(status_event: 'fail_op', error: exception.message.truncate(255))
+ end
+ end
+end
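
As a worked example of the batching above: with BATCH_SIZE = 1000, a relation holding 2,500 records yields 2500.fdiv(1000).ceil => 3 batches, and each batch's ids are cached under the key built by cache_key (the export and batch ids below are assumed values):

  BulkImports::BatchedRelationExportService.cache_key(42, 7)
  # => "bulk_imports/batched_relation_export/42/7"
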
diff --git a/app/services/bulk_imports/create_service.rb b/app/services/bulk_imports/create_service.rb
index ac019d9ec5b..4c9c59ac504 100644
--- a/app/services/bulk_imports/create_service.rb
+++ b/app/services/bulk_imports/create_service.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-# Entry point of the BulkImport feature.
+# Entry point of the BulkImport/Direct Transfer feature.
# This service receives a Gitlab Instance connection params
-# and a list of groups to be imported.
+# and a list of groups or projects to be imported.
#
# Process topography:
#
@@ -15,18 +15,24 @@
# P1 (sync)
#
# - Create a BulkImport record
-# - Create a BulkImport::Entity for each group to be imported
-# - Enqueue a BulkImportWorker job (P2) to import the given groups (entities)
+# - Create a BulkImport::Entity for each group or project (entities) to be imported
+# - Enqueue a BulkImportWorker job (P2) to import the given entity
#
# Pn (async)
#
# - For each group to be imported (BulkImport::Entity.with_status(:created))
# - Import the group data
# - Create entities for each subgroup of the imported group
-# - Enqueue a BulkImports::CreateService job (Pn) to import the new entities (subgroups)
-#
+# - Create entities for each project of the imported group
+# - Enqueue a BulkImportWorker job (Pn) to import the new entities
+
module BulkImports
class CreateService
+ ENTITY_TYPES_MAPPING = {
+ 'group_entity' => 'groups',
+ 'project_entity' => 'projects'
+ }.freeze
+
attr_reader :current_user, :params, :credentials
def initialize(current_user, params, credentials)
@@ -40,7 +46,12 @@ module BulkImports
bulk_import = create_bulk_import
- Gitlab::Tracking.event(self.class.name, 'create', label: 'bulk_import_group')
+ Gitlab::Tracking.event(
+ self.class.name,
+ 'create',
+ label: 'bulk_import_group',
+ extra: { source_equals_destination: source_equals_destination? }
+ )
BulkImportWorker.perform_async(bulk_import.id)
@@ -57,6 +68,7 @@ module BulkImports
def validate!
client.validate_instance_version!
+ validate_setting_enabled!
client.validate_import_scopes!
end
@@ -73,6 +85,8 @@ module BulkImports
Array.wrap(params).each do |entity_params|
track_access_level(entity_params)
+ validate_destination_namespace(entity_params)
+ validate_destination_slug(entity_params[:destination_slug] || entity_params[:destination_name])
validate_destination_full_path(entity_params)
BulkImports::Entity.create!(
@@ -88,6 +102,28 @@ module BulkImports
end
end
+ def validate_setting_enabled!
+ source_full_path, source_type = Array.wrap(params)[0].values_at(:source_full_path, :source_type)
+ entity_type = ENTITY_TYPES_MAPPING.fetch(source_type)
+ if source_full_path =~ /^[0-9]+$/
+ query = query_type(entity_type)
+ response = graphql_client.execute(
+ graphql_client.parse(query.to_s),
+ { full_path: source_full_path }
+ ).original_hash
+
+ source_entity_identifier = ::GlobalID.parse(response.dig(*query.data_path, 'id')).model_id
+ else
+ source_entity_identifier = ERB::Util.url_encode(source_full_path)
+ end
+
+ client.get("/#{entity_type}/#{source_entity_identifier}/export_relations/status")
+ # the source instance will return a 404 if the feature is disabled as the endpoint won't be available
+ rescue Gitlab::HTTP::BlockedUrlError
+ rescue BulkImports::NetworkError
+ raise ::BulkImports::Error.setting_not_enabled
+ end
+
def track_access_level(entity_params)
Gitlab::Tracking.event(
self.class.name,
@@ -98,6 +134,30 @@ module BulkImports
)
end
+ def source_equals_destination?
+ credentials[:url].starts_with?(Settings.gitlab.base_url)
+ end
+
+ def validate_destination_namespace(entity_params)
+ destination_namespace = entity_params[:destination_namespace]
+ source_type = entity_params[:source_type]
+
+ return if destination_namespace.blank?
+
+ group = Group.find_by_full_path(destination_namespace)
+ if group.nil? ||
+ (source_type == 'group_entity' && !current_user.can?(:create_subgroup, group)) ||
+ (source_type == 'project_entity' && !current_user.can?(:import_projects, group))
+ raise BulkImports::Error.destination_namespace_validation_failure(destination_namespace)
+ end
+ end
+
+ def validate_destination_slug(destination_slug)
+ return if destination_slug =~ Gitlab::Regex.oci_repository_path_regex
+
+ raise BulkImports::Error.destination_slug_validation_failure
+ end
+
def validate_destination_full_path(entity_params)
source_type = entity_params[:source_type]
@@ -140,5 +200,20 @@ module BulkImports
token: @credentials[:access_token]
)
end
+
+ def graphql_client
+ @graphql_client ||= BulkImports::Clients::Graphql.new(
+ url: @credentials[:url],
+ token: @credentials[:access_token]
+ )
+ end
+
+ def query_type(entity_type)
+ if entity_type == 'groups'
+ BulkImports::Groups::Graphql::GetGroupQuery.new(context: nil)
+ else
+ BulkImports::Projects::Graphql::GetProjectQuery.new(context: nil)
+ end
+ end
end
end
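
A sketch of how the updated entry point might be called; the source URL, token and paths are placeholder values:

  params = [{
    source_type: 'project_entity',
    source_full_path: 'source-group/source-project',
    destination_namespace: 'destination-group',
    destination_slug: 'imported-project'
  }]
  credentials = { url: 'https://source.example.com', access_token: 'glpat-example' }
  BulkImports::CreateService.new(current_user, params, credentials).execute
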
diff --git a/app/services/bulk_imports/export_service.rb b/app/services/bulk_imports/export_service.rb
index 33b3a8e187f..1b60a8e0ff3 100644
--- a/app/services/bulk_imports/export_service.rb
+++ b/app/services/bulk_imports/export_service.rb
@@ -2,14 +2,20 @@
module BulkImports
class ExportService
- def initialize(portable:, user:)
+ # @param portable [Project|Group] A project or a group to export.
+ # @param user [User] A user performing the export.
+ # @param batched [Boolean] Whether to export the data in batches.
+ def initialize(portable:, user:, batched: false)
@portable = portable
@current_user = user
+ @batched = batched
end
def execute
+ validate_user_permissions!
+
FileTransfer.config_for(portable).portable_relations.each do |relation|
- RelationExportWorker.perform_async(current_user.id, portable.id, portable.class.name, relation)
+ RelationExportWorker.perform_async(current_user.id, portable.id, portable.class.name, relation, batched)
end
ServiceResponse.success
@@ -22,6 +28,13 @@ module BulkImports
private
- attr_reader :portable, :current_user
+ attr_reader :portable, :current_user, :batched
+
+ def validate_user_permissions!
+ ability = "admin_#{portable.to_ability_name}"
+
+ current_user.can?(ability, portable) ||
+ raise(::Gitlab::ImportExport::Error.permission_error(current_user, portable))
+ end
end
end
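
With the new keyword argument, a caller opting into batched exports might look like this (project and current_user are assumed):

  BulkImports::ExportService.new(portable: project, user: current_user, batched: true).execute
  # enqueues one RelationExportWorker per portable relation, now passing batched: true
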
diff --git a/app/services/bulk_imports/file_export_service.rb b/app/services/bulk_imports/file_export_service.rb
index b2d114368a1..8b073f65769 100644
--- a/app/services/bulk_imports/file_export_service.rb
+++ b/app/services/bulk_imports/file_export_service.rb
@@ -4,39 +4,58 @@ module BulkImports
class FileExportService
include Gitlab::ImportExport::CommandLineUtil
- def initialize(portable, export_path, relation)
+ SINGLE_OBJECT_RELATIONS = [
+ FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION,
+ FileTransfer::ProjectConfig::DESIGN_BUNDLE_RELATION
+ ].freeze
+
+ def initialize(portable, export_path, relation, user)
@portable = portable
@export_path = export_path
@relation = relation
+ @user = user # not used anywhere in this class at the moment
end
- def execute
- export_service.execute
+ def execute(options = {})
+ export_service.execute(options)
archive_exported_data
end
+ def export_batch(ids)
+ execute(batch_ids: ids)
+ end
+
def exported_filename
"#{relation}.tar"
end
+ def exported_objects_count
+ case relation
+ when *SINGLE_OBJECT_RELATIONS
+ 1
+ else
+ export_service.exported_objects_count
+ end
+ end
+
private
attr_reader :export_path, :portable, :relation
def export_service
- case relation
- when FileTransfer::BaseConfig::UPLOADS_RELATION
- UploadsExportService.new(portable, export_path)
- when FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION
- LfsObjectsExportService.new(portable, export_path)
- when FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION
- RepositoryBundleExportService.new(portable.repository, export_path, relation)
- when FileTransfer::ProjectConfig::DESIGN_BUNDLE_RELATION
- RepositoryBundleExportService.new(portable.design_repository, export_path, relation)
- else
- raise BulkImports::Error, 'Unsupported relation export type'
- end
+ @export_service ||= case relation
+ when FileTransfer::BaseConfig::UPLOADS_RELATION
+ UploadsExportService.new(portable, export_path)
+ when FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION
+ LfsObjectsExportService.new(portable, export_path)
+ when FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION
+ RepositoryBundleExportService.new(portable.repository, export_path, relation)
+ when FileTransfer::ProjectConfig::DESIGN_BUNDLE_RELATION
+ RepositoryBundleExportService.new(portable.design_repository, export_path, relation)
+ else
+ raise BulkImports::Error, 'Unsupported relation export type'
+ end
end
def archive_exported_data
diff --git a/app/services/bulk_imports/lfs_objects_export_service.rb b/app/services/bulk_imports/lfs_objects_export_service.rb
index b3b7cddf2d9..3020e8ababb 100644
--- a/app/services/bulk_imports/lfs_objects_export_service.rb
+++ b/app/services/bulk_imports/lfs_objects_export_service.rb
@@ -6,16 +6,26 @@ module BulkImports
BATCH_SIZE = 100
+ attr_reader :exported_objects_count
+
def initialize(portable, export_path)
@portable = portable
@export_path = export_path
@lfs_json = {}
+ @exported_objects_count = 0
end
- def execute
- portable.lfs_objects.find_in_batches(batch_size: BATCH_SIZE) do |batch| # rubocop: disable CodeReuse/ActiveRecord
+ def execute(options = {})
+ relation = portable.lfs_objects
+
+ if options[:batch_ids]
+ relation = relation.where(relation.model.primary_key => options[:batch_ids]) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
+ relation.find_in_batches(batch_size: BATCH_SIZE) do |batch| # rubocop: disable CodeReuse/ActiveRecord
batch.each do |lfs_object|
save_lfs_object(lfs_object)
+ @exported_objects_count += 1
end
append_lfs_json_for_batch(batch)
diff --git a/app/services/bulk_imports/relation_batch_export_service.rb b/app/services/bulk_imports/relation_batch_export_service.rb
new file mode 100644
index 00000000000..19eb550216d
--- /dev/null
+++ b/app/services/bulk_imports/relation_batch_export_service.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class RelationBatchExportService
+ include Gitlab::ImportExport::CommandLineUtil
+
+ def initialize(user_id, batch_id)
+ @user = User.find(user_id)
+ @batch = BulkImports::ExportBatch.find(batch_id)
+ @config = FileTransfer.config_for(portable)
+ end
+
+ def execute
+ start_batch!
+
+ export_service.export_batch(relation_batch_ids)
+ compress_exported_relation
+ upload_compressed_file
+
+ finish_batch!
+ rescue StandardError => e
+ fail_batch!(e)
+ ensure
+ FileUtils.remove_entry(export_path)
+ end
+
+ private
+
+ attr_reader :user, :batch, :config
+
+ delegate :export_path, to: :config
+ delegate :batch_number, :export, to: :batch
+ delegate :portable, :relation, to: :export
+ delegate :exported_filename, :exported_objects_count, to: :export_service
+
+ def export_service
+ @export_service ||= config.export_service_for(relation).new(portable, export_path, relation, user)
+ end
+
+ def compress_exported_relation
+ gzip(dir: export_path, filename: exported_filename)
+ end
+
+ def upload_compressed_file
+ File.open(compressed_filename) { |file| batch_upload.export_file = file }
+
+ batch_upload.save!
+ end
+
+ def batch_upload
+ @batch_upload ||= ::BulkImports::ExportUpload.find_or_initialize_by(export_id: export.id, batch_id: batch.id) # rubocop: disable CodeReuse/ActiveRecord
+ end
+
+ def compressed_filename
+ File.join(export_path, "#{exported_filename}.gz")
+ end
+
+ def relation_batch_ids
+ Gitlab::Cache::Import::Caching.values_from_set(cache_key).map(&:to_i)
+ end
+
+ def cache_key
+ BulkImports::BatchedRelationExportService.cache_key(export.id, batch.id)
+ end
+
+ def start_batch!
+ batch.update!(status_event: 'start', objects_count: 0, error: nil)
+ end
+
+ def finish_batch!
+ batch.update!(status_event: 'finish', objects_count: exported_objects_count, error: nil)
+ end
+
+ def fail_batch!(exception)
+ Gitlab::ErrorTracking.track_exception(exception, portable_id: portable.id, portable_type: portable.class.name)
+
+ batch.update!(status_event: 'fail_op', error: exception.message.truncate(255))
+ end
+ end
+end
diff --git a/app/services/bulk_imports/relation_export_service.rb b/app/services/bulk_imports/relation_export_service.rb
index b1efa881180..142bc48efe3 100644
--- a/app/services/bulk_imports/relation_export_service.rb
+++ b/app/services/bulk_imports/relation_export_service.rb
@@ -22,36 +22,27 @@ module BulkImports
upload_compressed_file(export)
end
ensure
- FileUtils.remove_entry(config.export_path)
+ FileUtils.remove_entry(export_path)
end
private
attr_reader :user, :portable, :relation, :jid, :config
- def find_or_create_export!
- validate_user_permissions!
+ delegate :export_path, to: :config
+ def find_or_create_export!
export = portable.bulk_import_exports.safe_find_or_create_by!(relation: relation)
- return export if export.finished? && export.updated_at > EXISTING_EXPORT_TTL.ago
+ return export if export.finished? && export.updated_at > EXISTING_EXPORT_TTL.ago && !export.batched?
- export.update!(status_event: 'start', jid: jid)
+ start_export!(export)
yield export
- export.update!(status_event: 'finish', error: nil)
+ finish_export!(export)
rescue StandardError => e
- Gitlab::ErrorTracking.track_exception(e, portable_id: portable.id, portable_type: portable.class.name)
-
- export&.update(status_event: 'fail_op', error: e.class)
- end
-
- def validate_user_permissions!
- ability = "admin_#{portable.to_ability_name}"
-
- user.can?(ability, portable) ||
- raise(::Gitlab::ImportExport::Error.permission_error(user, portable))
+ fail_export!(export, e)
end
def remove_existing_export_file!(export)
@@ -65,16 +56,16 @@ module BulkImports
def export_service
@export_service ||= if config.tree_relation?(relation) || config.self_relation?(relation)
- TreeExportService.new(portable, config.export_path, relation, user)
+ TreeExportService.new(portable, export_path, relation, user)
elsif config.file_relation?(relation)
- FileExportService.new(portable, config.export_path, relation)
+ FileExportService.new(portable, export_path, relation, user)
else
raise BulkImports::Error, 'Unsupported export relation'
end
end
def upload_compressed_file(export)
- compressed_file = File.join(config.export_path, "#{export_service.exported_filename}.gz")
+ compressed_file = File.join(export_path, "#{export_service.exported_filename}.gz")
upload = ExportUpload.find_or_initialize_by(export_id: export.id) # rubocop: disable CodeReuse/ActiveRecord
@@ -84,7 +75,30 @@ module BulkImports
end
def compress_exported_relation
- gzip(dir: config.export_path, filename: export_service.exported_filename)
+ gzip(dir: export_path, filename: export_service.exported_filename)
+ end
+
+ def start_export!(export)
+ export.update!(
+ status_event: 'start',
+ jid: jid,
+ batched: false,
+ batches_count: 0,
+ total_objects_count: 0,
+ error: nil
+ )
+
+ export.batches.destroy_all if export.batches.any? # rubocop:disable Cop/DestroyAll
+ end
+
+ def finish_export!(export)
+ export.update!(status_event: 'finish', batched: false, error: nil)
+ end
+
+ def fail_export!(export, exception)
+ Gitlab::ErrorTracking.track_exception(exception, portable_id: portable.id, portable_type: portable.class.name)
+
+ export&.update(status_event: 'fail_op', error: exception.class, batched: false)
end
end
end
diff --git a/app/services/bulk_imports/repository_bundle_export_service.rb b/app/services/bulk_imports/repository_bundle_export_service.rb
index 86159f5189d..441cced2f7f 100644
--- a/app/services/bulk_imports/repository_bundle_export_service.rb
+++ b/app/services/bulk_imports/repository_bundle_export_service.rb
@@ -8,7 +8,7 @@ module BulkImports
@export_filename = export_filename
end
- def execute
+ def execute(_options = {})
return unless repository_exists?
repository.bundle_to_disk(bundle_filepath)
diff --git a/app/services/bulk_imports/tree_export_service.rb b/app/services/bulk_imports/tree_export_service.rb
index b6f094da558..0aad271f40f 100644
--- a/app/services/bulk_imports/tree_export_service.rb
+++ b/app/services/bulk_imports/tree_export_service.rb
@@ -2,6 +2,10 @@
module BulkImports
class TreeExportService
+ include Gitlab::Utils::StrongMemoize
+
+ delegate :exported_objects_count, to: :serializer
+
def initialize(portable, export_path, relation, user)
@portable = portable
@export_path = export_path
@@ -11,43 +15,52 @@ module BulkImports
end
def execute
- return serializer.serialize_root(config.class::SELF_RELATION) if self_relation?
-
- relation_definition = config.tree_relation_definition_for(relation)
-
- raise BulkImports::Error, 'Unsupported relation export type' unless relation_definition
+ if self_relation?(relation)
+ serializer.serialize_root(config.class::SELF_RELATION)
+ else
+ serializer.serialize_relation(relation_definition)
+ end
+ end
- serializer.serialize_relation(relation_definition)
+ def export_batch(ids)
+ serializer.serialize_relation(relation_definition, batch_ids: Array.wrap(ids))
end
def exported_filename
- return "#{relation}.json" if self_relation?
-
- "#{relation}.ndjson"
+ "#{relation}.#{extension}"
end
private
+ delegate :self_relation?, to: :config
+
attr_reader :export_path, :portable, :relation, :config, :user
# rubocop: disable CodeReuse/Serializer
def serializer
- ::Gitlab::ImportExport::Json::StreamingSerializer.new(
+ @serializer ||= ::Gitlab::ImportExport::Json::StreamingSerializer.new(
portable,
config.portable_tree,
- json_writer,
+ ::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path),
exportable_path: '',
current_user: user
)
end
# rubocop: enable CodeReuse/Serializer
- def json_writer
- ::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path)
+ def extension
+ return 'json' if self_relation?(relation)
+
+ 'ndjson'
end
- def self_relation?
- relation == config.class::SELF_RELATION
+ def relation_definition
+ definition = config.tree_relation_definition_for(relation)
+
+ raise BulkImports::Error, 'Unsupported relation export type' unless definition
+
+ definition
end
+ strong_memoize_attr :relation_definition
end
end
diff --git a/app/services/bulk_imports/uploads_export_service.rb b/app/services/bulk_imports/uploads_export_service.rb
index 315590bea31..4d55f159af4 100644
--- a/app/services/bulk_imports/uploads_export_service.rb
+++ b/app/services/bulk_imports/uploads_export_service.rb
@@ -7,13 +7,22 @@ module BulkImports
BATCH_SIZE = 100
AVATAR_PATH = 'avatar'
+ attr_reader :exported_objects_count
+
def initialize(portable, export_path)
@portable = portable
@export_path = export_path
+ @exported_objects_count = 0
end
- def execute
- portable.uploads.find_each(batch_size: BATCH_SIZE) do |upload| # rubocop: disable CodeReuse/ActiveRecord
+ def execute(options = {})
+ relation = portable.uploads
+
+ if options[:batch_ids]
+ relation = relation.where(relation.model.primary_key => options[:batch_ids]) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
+ relation.find_each(batch_size: BATCH_SIZE) do |upload| # rubocop: disable CodeReuse/ActiveRecord
uploader = upload.retrieve_uploader
next unless upload.exist?
@@ -22,6 +31,7 @@ module BulkImports
subdir_path = export_subdir_path(upload)
mkdir_p(subdir_path)
download_or_copy_upload(uploader, File.join(subdir_path, uploader.filename))
+ @exported_objects_count += 1
rescue StandardError => e
# Do not fail entire project export if something goes wrong during file download
# (e.g. downloaded file has filename that exceeds 255 characters).
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index 4b62580e670..e370f85fa96 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -45,29 +45,12 @@ module Ci
return
end
- # TODO: Remove this logging once we confirmed new live trace architecture is functional.
- # See https://gitlab.com/gitlab-com/gl-infra/infrastructure/issues/4667.
- unless job.has_live_trace?
- Sidekiq.logger.warn(class: worker_name,
- message: 'The job does not have live trace but going to be archived.',
- job_id: job.id)
- return
- end
-
job.trace.archive!
job.remove_pending_state!
if job.job_artifacts_trace.present?
job.project.execute_integrations(Gitlab::DataBuilder::ArchiveTrace.build(job), :archive_trace_hooks)
end
-
- # TODO: Remove this logging once we confirmed new live trace architecture is functional.
- # See https://gitlab.com/gitlab-com/gl-infra/infrastructure/issues/4667.
- unless job.has_archived_trace?
- Sidekiq.logger.warn(class: worker_name,
- message: 'The job does not have archived trace after archiving.',
- job_id: job.id)
- end
rescue ::Gitlab::Ci::Trace::AlreadyArchivedError
# It's already archived, thus we can safely ignore this exception.
rescue StandardError => e
@@ -84,21 +67,23 @@ module Ci
def failed_archive_counter
@failed_archive_counter ||=
- Gitlab::Metrics.counter(:job_trace_archive_failed_total,
- "Counter of failed attempts of trace archiving")
+ Gitlab::Metrics.counter(:job_trace_archive_failed_total, "Counter of failed attempts of trace archiving")
end
def archive_error(error, job, worker_name)
failed_archive_counter.increment
- Sidekiq.logger.warn(class: worker_name,
- message: "Failed to archive trace. message: #{error.message}.",
- job_id: job.id)
-
- Gitlab::ErrorTracking
- .track_and_raise_for_dev_exception(error,
- issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
- job_id: job.id)
+ Sidekiq.logger.warn(
+ class: worker_name,
+ message: "Failed to archive trace. message: #{error.message}.",
+ job_id: job.id
+ )
+
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
+ error,
+ issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
+ job_id: job.id
+ )
end
end
end
diff --git a/app/services/ci/catalog/validate_resource_service.rb b/app/services/ci/catalog/validate_resource_service.rb
new file mode 100644
index 00000000000..f166c220869
--- /dev/null
+++ b/app/services/ci/catalog/validate_resource_service.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Ci
+ module Catalog
+ class ValidateResourceService
+ attr_reader :project
+
+ def initialize(project, ref)
+ @project = project
+ @ref = ref
+ @errors = []
+ end
+
+ def execute
+ check_project_readme
+ check_project_description
+
+ if errors.empty?
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: errors.join(' , '))
+ end
+ end
+
+ private
+
+ attr_reader :ref, :errors
+
+ def check_project_description
+ return if project.description.present?
+
+ errors << 'Project must have a description'
+ end
+
+ def check_project_readme
+ return if project_has_readme?
+
+ errors << 'Project must have a README'
+ end
+
+ def project_has_readme?
+ project.repository.blob_data_at(ref, 'README.md')
+ end
+ end
+ end
+end
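
Assumed usage of the new catalog check, run against a project's default branch:

  response = Ci::Catalog::ValidateResourceService.new(project, project.default_branch).execute
  response.message
  # => "Project must have a README , Project must have a description" when both checks fail
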
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 390675ab80b..a8da83e84a1 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -7,7 +7,6 @@ module Ci
LOG_MAX_DURATION_THRESHOLD = 3.seconds
LOG_MAX_PIPELINE_SIZE = 2_000
LOG_MAX_CREATION_THRESHOLD = 20.seconds
-
SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Build,
Gitlab::Ci::Pipeline::Chain::Build::Associations,
Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
@@ -34,7 +33,6 @@ module Ci
Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups,
Gitlab::Ci::Pipeline::Chain::Create,
Gitlab::Ci::Pipeline::Chain::CreateCrossDatabaseAssociations,
- Gitlab::Ci::Pipeline::Chain::Limit::Activity,
Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines,
Gitlab::Ci::Pipeline::Chain::Metrics,
Gitlab::Ci::Pipeline::Chain::TemplateUsage,
@@ -161,7 +159,7 @@ module Ci
pipeline_includes_count = observations['pipeline_includes_count']
next false unless pipeline_includes_count
- pipeline_includes_count.to_i > Gitlab::Ci::Config::External::Context::MAX_INCLUDES
+ pipeline_includes_count.to_i > Gitlab::Ci::Config::External::Context::TEMP_MAX_INCLUDES
end
end
end
diff --git a/app/services/ci/ensure_stage_service.rb b/app/services/ci/ensure_stage_service.rb
index cbb3a2e4709..9d5ccecbe33 100644
--- a/app/services/ci/ensure_stage_service.rb
+++ b/app/services/ci/ensure_stage_service.rb
@@ -45,10 +45,12 @@ module Ci
# rubocop: enable CodeReuse/ActiveRecord
def create_stage
- Ci::Stage.create!(name: @build.stage,
- position: @build.stage_idx,
- pipeline: @build.pipeline,
- project: @build.project)
+ Ci::Stage.create!(
+ name: @build.stage,
+ position: @build.stage_idx,
+ pipeline: @build.pipeline,
+ project: @build.project
+ )
end
end
end
diff --git a/app/services/ci/generate_kubeconfig_service.rb b/app/services/ci/generate_kubeconfig_service.rb
index 1c6aaa9d1ff..56e22a64529 100644
--- a/app/services/ci/generate_kubeconfig_service.rb
+++ b/app/services/ci/generate_kubeconfig_service.rb
@@ -41,7 +41,7 @@ module Ci
attr_reader :pipeline, :token, :environment, :template
def agent_authorizations
- ::Clusters::Agents::FilterAuthorizationsService.new(
+ ::Clusters::Agents::Authorizations::CiAccess::FilterService.new(
pipeline.cluster_agent_authorizations,
environment: environment
).execute
diff --git a/app/services/ci/job_artifacts/bulk_delete_by_project_service.rb b/app/services/ci/job_artifacts/bulk_delete_by_project_service.rb
new file mode 100644
index 00000000000..738fa19e29b
--- /dev/null
+++ b/app/services/ci/job_artifacts/bulk_delete_by_project_service.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobArtifacts
+ class BulkDeleteByProjectService
+ include BaseServiceUtility
+
+ JOB_ARTIFACTS_COUNT_LIMIT = 50
+
+ def initialize(job_artifact_ids:, project:, current_user:)
+ @job_artifact_ids = job_artifact_ids
+ @project = project
+ @current_user = current_user
+ end
+
+ def execute
+ if exceeds_limits?
+ return ServiceResponse.error(
+ message: "Can only delete up to #{JOB_ARTIFACTS_COUNT_LIMIT} job artifacts per call"
+ )
+ end
+
+ find_result = find_artifacts
+
+ return ServiceResponse.error(message: find_result[:error_message]) if find_result[:error_message]
+
+ @job_artifact_scope = find_result[:scope]
+
+ unless all_job_artifacts_belong_to_project?
+ return ServiceResponse.error(message: 'Not all artifacts belong to requested project')
+ end
+
+ result = Ci::JobArtifacts::DestroyBatchService.new(job_artifact_scope).execute
+
+ destroyed_artifacts_count = result.fetch(:destroyed_artifacts_count)
+ destroyed_ids = result.fetch(:destroyed_ids)
+
+ ServiceResponse.success(
+ payload: {
+ destroyed_count: destroyed_artifacts_count,
+ destroyed_ids: destroyed_ids,
+ errors: []
+ })
+ end
+
+ private
+
+ def find_artifacts
+ job_artifacts = ::Ci::JobArtifact.id_in(job_artifact_ids)
+
+ error_message = nil
+ if job_artifacts.count != job_artifact_ids.count
+ not_found_artifacts = job_artifact_ids - job_artifacts.map(&:id)
+ error_message = "Artifacts (#{not_found_artifacts.join(',')}) not found"
+ end
+
+ { scope: job_artifacts, error_message: error_message }
+ end
+
+ def exceeds_limits?
+ job_artifact_ids.count > JOB_ARTIFACTS_COUNT_LIMIT
+ end
+
+ def all_job_artifacts_belong_to_project?
+ # rubocop:disable CodeReuse/ActiveRecord
+ job_artifact_scope.pluck(:project_id).all?(project.id)
+ # rubocop:enable CodeReuse/ActiveRecord
+ end
+
+ attr_reader :job_artifact_ids, :job_artifact_scope, :current_user, :project
+ end
+ end
+end
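
Usage sketch for the new Ci::JobArtifacts::BulkDeleteByProjectService (console-style; the IDs, project and user are placeholders):

  response = Ci::JobArtifacts::BulkDeleteByProjectService.new(
    job_artifact_ids: [101, 102, 103],   # at most 50 IDs per call
    project: project,
    current_user: current_user
  ).execute

  if response.success?
    response.payload[:destroyed_ids]     # IDs that were actually removed
  else
    response.message                     # limit exceeded, unknown IDs, or artifacts outside the project
  end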
diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb
index 3d19fec6617..f7e04c59463 100644
--- a/app/services/ci/job_artifacts/create_service.rb
+++ b/app/services/ci/job_artifacts/create_service.rb
@@ -23,7 +23,11 @@ module Ci
result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
return result unless result[:status] == :success
- headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
+ headers = JobArtifactUploader.workhorse_authorize(
+ has_length: false,
+ maximum_size: max_size(artifact_type),
+ use_final_store_path: Feature.enabled?(:ci_artifacts_upload_to_final_location, project)
+ )
if lsif?(artifact_type)
headers[:ProcessLsif] = true
@@ -39,14 +43,18 @@ module Ci
return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
- artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
- result = parse_artifact(artifact)
+ build_result = build_artifact(artifacts_file, params, metadata_file)
+ return build_result unless build_result[:status] == :success
+
+ artifact = build_result[:artifact]
+ artifact_metadata = build_result[:artifact_metadata]
track_artifact_uploader(artifact)
- return result unless result[:status] == :success
+ parse_result = parse_artifact(artifact)
+ return parse_result unless parse_result[:status] == :success
- persist_artifact(artifact, artifact_metadata, params)
+ persist_artifact(artifact, artifact_metadata)
end
private
@@ -76,44 +84,54 @@ module Ci
end
def build_artifact(artifacts_file, params, metadata_file)
- expire_in = params['expire_in'] ||
- Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
-
artifact_attributes = {
job: job,
project: project,
- expire_in: expire_in
+ expire_in: expire_in(params),
+ accessibility: accessibility(params),
+ locked: pipeline.locked
+ }
+
+ file_attributes = {
+ file_type: params[:artifact_type],
+ file_format: params[:artifact_format],
+ file_sha256: artifacts_file.sha256,
+ file: artifacts_file
}
- artifact_attributes[:locked] = pipeline.locked
+ artifact = Ci::JobArtifact.new(artifact_attributes.merge(file_attributes))
- artifact = Ci::JobArtifact.new(
- artifact_attributes.merge(
- file: artifacts_file,
- file_type: params[:artifact_type],
- file_format: params[:artifact_format],
- file_sha256: artifacts_file.sha256,
- accessibility: accessibility(params)
- )
- )
+ artifact_metadata = build_metadata_artifact(artifact, metadata_file) if metadata_file
- artifact_metadata = if metadata_file
- Ci::JobArtifact.new(
- artifact_attributes.merge(
- file: metadata_file,
- file_type: :metadata,
- file_format: :gzip,
- file_sha256: metadata_file.sha256,
- accessibility: accessibility(params)
- )
- )
- end
+ success(artifact: artifact, artifact_metadata: artifact_metadata)
+ end
+
+ def build_metadata_artifact(job_artifact, metadata_file)
+ Ci::JobArtifact.new(
+ job: job_artifact.job,
+ project: job_artifact.project,
+ expire_at: job_artifact.expire_at,
+ locked: job_artifact.locked,
+ file: metadata_file,
+ file_type: :metadata,
+ file_format: :gzip,
+ file_sha256: metadata_file.sha256,
+ accessibility: job_artifact.accessibility
+ )
+ end
- [artifact, artifact_metadata]
+ def expire_in(params)
+ params['expire_in'] || Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
end
def accessibility(params)
- params[:accessibility] || 'public'
+ accessibility = params[:accessibility]
+
+ return :public if Feature.disabled?(:non_public_artifacts, type: :development)
+
+ return accessibility if accessibility.present?
+
+ job.artifacts_public? ? :public : :private
end
def parse_artifact(artifact)
@@ -123,24 +141,26 @@ module Ci
end
end
- def persist_artifact(artifact, artifact_metadata, params)
- Ci::JobArtifact.transaction do
- artifact.save!
- artifact_metadata&.save!
-
+ def persist_artifact(artifact, artifact_metadata)
+ job.transaction do
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
+ # Running it first because in migrations we lock the `ci_builds` table
+ # first and then the others. This reduces the chances of deadlocks.
job.update_column(:artifacts_expire_at, artifact.expire_at)
+
+ artifact.save!
+ artifact_metadata&.save!
end
success(artifact: artifact)
rescue ActiveRecord::RecordNotUnique => error
- track_exception(error, params)
+ track_exception(error, artifact.file_type)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
- track_exception(error, params)
+ track_exception(error, artifact.file_type)
error(error.message, :service_unavailable)
rescue StandardError => error
- track_exception(error, params)
+ track_exception(error, artifact.file_type)
error(error.message, :bad_request)
end
@@ -151,11 +171,12 @@ module Ci
existing_artifact.file_sha256 == artifacts_file.sha256
end
- def track_exception(error, params)
- Gitlab::ErrorTracking.track_exception(error,
+ def track_exception(error, artifact_type)
+ Gitlab::ErrorTracking.track_exception(
+ error,
job_id: job.id,
project_id: job.project_id,
- uploading_type: params[:artifact_type]
+ uploading_type: artifact_type
)
end
diff --git a/app/services/ci/job_artifacts/destroy_all_expired_service.rb b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
index b5dd5b843c6..57b95e59d7d 100644
--- a/app/services/ci/job_artifacts/destroy_all_expired_service.rb
+++ b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
@@ -25,11 +25,7 @@ module Ci
# which is scheduled every 7 minutes.
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
- if ::Feature.enabled?(:ci_destroy_unlocked_job_artifacts)
- destroy_unlocked_job_artifacts
- else
- destroy_job_artifacts_with_slow_iteration
- end
+ destroy_unlocked_job_artifacts
end
@removed_artifacts_count
@@ -39,26 +35,12 @@ module Ci
def destroy_unlocked_job_artifacts
loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
- artifacts = Ci::JobArtifact.expired_before(@start_at).artifact_unlocked.limit(BATCH_SIZE)
+ artifacts = Ci::JobArtifact.expired_before(@start_at).non_trace.artifact_unlocked.limit(BATCH_SIZE)
service_response = destroy_batch(artifacts)
@removed_artifacts_count += service_response[:destroyed_artifacts_count]
end
end
- def destroy_job_artifacts_with_slow_iteration
- Ci::JobArtifact.expired_before(@start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
- # For performance reasons, join with ci_pipelines after the batch is queried.
- # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
- artifacts = relation.unlocked
-
- service_response = destroy_batch(artifacts)
- @removed_artifacts_count += service_response[:destroyed_artifacts_count]
-
- break if loop_timeout?
- break if index >= LOOP_LIMIT
- end
- end
-
def destroy_batch(artifacts)
Ci::JobArtifacts::DestroyBatchService.new(artifacts, skip_projects_on_refresh: true).execute
end
diff --git a/app/services/ci/job_artifacts/destroy_batch_service.rb b/app/services/ci/job_artifacts/destroy_batch_service.rb
index 7cb1be95a3e..81cbeb31711 100644
--- a/app/services/ci/job_artifacts/destroy_batch_service.rb
+++ b/app/services/ci/job_artifacts/destroy_batch_service.rb
@@ -21,6 +21,7 @@ module Ci
@job_artifacts = job_artifacts.with_destroy_preloads.to_a
@pick_up_at = pick_up_at
@skip_projects_on_refresh = skip_projects_on_refresh
+ @destroyed_ids = []
end
# rubocop: disable CodeReuse/ActiveRecord
@@ -31,16 +32,17 @@ module Ci
track_artifacts_undergoing_stats_refresh
end
- exclude_trace_artifacts
-
- return success(destroyed_artifacts_count: 0, statistics_updates: {}) if @job_artifacts.empty?
+ if @job_artifacts.empty?
+ return success(destroyed_ids: @destroyed_ids, destroyed_artifacts_count: 0, statistics_updates: {})
+ end
destroy_related_records(@job_artifacts)
destroy_around_hook(@job_artifacts) do
+ @destroyed_ids = @job_artifacts.map(&:id)
Ci::DeletedObject.transaction do
Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
- Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
+ Ci::JobArtifact.id_in(@destroyed_ids).delete_all
end
end
@@ -52,7 +54,11 @@ module Ci
Gitlab::Ci::Artifacts::Logger.log_deleted(@job_artifacts, 'Ci::JobArtifacts::DestroyBatchService#execute')
- success(destroyed_artifacts_count: artifacts_count, statistics_updates: statistics_updates_per_project)
+ success(
+ destroyed_ids: @destroyed_ids,
+ destroyed_artifacts_count: artifacts_count,
+ statistics_updates: statistics_updates_per_project
+ )
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -110,11 +116,6 @@ module Ci
end
end
- # Traces should never be destroyed.
- def exclude_trace_artifacts
- _trace_artifacts, @job_artifacts = @job_artifacts.partition(&:trace?)
- end
-
def track_artifacts_undergoing_stats_refresh
project_ids = @job_artifacts.find_all do |artifact|
artifact.project.refreshing_build_artifacts_size?
diff --git a/app/services/ci/job_token_scope/add_project_service.rb b/app/services/ci/job_token_scope/add_project_service.rb
index 15553ad6e92..8fb543a2796 100644
--- a/app/services/ci/job_token_scope/add_project_service.rb
+++ b/app/services/ci/job_token_scope/add_project_service.rb
@@ -5,9 +5,7 @@ module Ci
class AddProjectService < ::BaseService
include EditScopeValidations
- def execute(target_project, direction: :outbound)
- direction = :outbound if Feature.disabled?(:ci_inbound_job_token_scope)
-
+ def execute(target_project, direction: :inbound)
validate_edit!(project, target_project, current_user)
link = allowlist(direction)
@@ -31,3 +29,5 @@ module Ci
end
end
end
+
+Ci::JobTokenScope::AddProjectService.prepend_mod_with('Ci::JobTokenScope::AddProjectService')
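
With the default direction flipped to :inbound, a call without an explicit direction now adds the target project to the inbound allowlist. A sketch (the positional project/user constructor follows the ::BaseService convention and is an assumption here, as it is not shown in this hunk):

  service = Ci::JobTokenScope::AddProjectService.new(project, current_user)   # assumed BaseService-style constructor
  service.execute(target_project)                          # direction now defaults to :inbound
  service.execute(target_project, direction: :outbound)    # outbound scope can still be requested explicitly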
diff --git a/app/services/ci/job_token_scope/remove_project_service.rb b/app/services/ci/job_token_scope/remove_project_service.rb
index 864f9318c68..d6a2defd5b9 100644
--- a/app/services/ci/job_token_scope/remove_project_service.rb
+++ b/app/services/ci/job_token_scope/remove_project_service.rb
@@ -31,3 +31,5 @@ module Ci
end
end
end
+
+Ci::JobTokenScope::RemoveProjectService.prepend_mod_with('Ci::JobTokenScope::RemoveProjectService')
diff --git a/app/services/ci/list_config_variables_service.rb b/app/services/ci/list_config_variables_service.rb
index dbea270b7c6..1020e98f463 100644
--- a/app/services/ci/list_config_variables_service.rb
+++ b/app/services/ci/list_config_variables_service.rb
@@ -28,9 +28,12 @@ module Ci
return {} unless config.exists?
- result = Gitlab::Ci::YamlProcessor.new(config.content, project: project,
- user: current_user,
- sha: sha).execute
+ result = Gitlab::Ci::YamlProcessor.new(
+ config.content,
+ project: project,
+ user: current_user,
+ sha: sha
+ ).execute
result.valid? ? result.root_variables_with_prefill_data : {}
end
diff --git a/app/services/ci/parse_dotenv_artifact_service.rb b/app/services/ci/parse_dotenv_artifact_service.rb
index d4d5acef44e..89a3c7d9e03 100644
--- a/app/services/ci/parse_dotenv_artifact_service.rb
+++ b/app/services/ci/parse_dotenv_artifact_service.rb
@@ -44,8 +44,13 @@ module Ci
blob.each_line do |line|
key, value = scan_line!(line)
- variables[key] = Ci::JobVariable.new(job_id: artifact.job_id,
- source: :dotenv, key: key, value: value, raw: false)
+ variables[key] = Ci::JobVariable.new(
+ job_id: artifact.job_id,
+ source: :dotenv,
+ key: key,
+ value: value,
+ raw: false
+ )
end
end
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service.rb b/app/services/ci/pipeline_processing/atomic_processing_service.rb
index 2b8eb104be5..1094a131e68 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service.rb
@@ -19,9 +19,10 @@ module Ci
def execute
return unless pipeline.needs_processing?
+ # Run the process only if we can obtain an exclusive lease; returns nil if lease is unavailable
success = try_obtain_lease { process! }
- # re-schedule if we need further processing
+ # Re-schedule if we need further processing
if success && pipeline.needs_processing?
PipelineProcessWorker.perform_async(pipeline.id)
end
@@ -34,7 +35,7 @@ module Ci
def process!
update_stages!
update_pipeline!
- update_statuses_processed!
+ update_jobs_processed!
Ci::ExpirePipelineCacheService.new.execute(pipeline)
@@ -46,62 +47,61 @@ module Ci
end
def update_stage!(stage)
- # Update processables for a given stage in bulk/slices
+ # Update jobs for a given stage in bulk/slices
@collection
- .created_processable_ids_for_stage_position(stage.position)
- .in_groups_of(BATCH_SIZE, false) { |ids| update_processables!(ids) }
+ .created_job_ids_in_stage(stage.position)
+ .in_groups_of(BATCH_SIZE, false) { |ids| update_jobs!(ids) }
- status = @collection.status_for_stage_position(stage.position)
+ status = @collection.status_of_stage(stage.position)
stage.set_status(status)
end
- def update_processables!(ids)
- created_processables = pipeline.processables.id_in(ids)
+ def update_jobs!(ids)
+ created_jobs = pipeline
+ .current_processable_jobs
+ .id_in(ids)
.with_project_preload
.created
- .latest
.ordered_by_stage
.select_with_aggregated_needs(project)
- created_processables.each { |processable| update_processable!(processable) }
+ created_jobs.each { |job| update_job!(job) }
end
def update_pipeline!
pipeline.set_status(@collection.status_of_all)
end
- def update_statuses_processed!
- processing = @collection.processing_processables
+ def update_jobs_processed!
+ processing = @collection.processing_jobs
processing.each_slice(BATCH_SIZE) do |slice|
- pipeline.statuses.match_id_and_lock_version(slice)
+ pipeline.all_jobs.match_id_and_lock_version(slice)
.update_as_processed!
end
end
- def update_processable!(processable)
- status = processable_status(processable)
- return unless Ci::HasStatus::COMPLETED_STATUSES.include?(status)
+ def update_job!(job)
+ previous_status = status_of_previous_jobs(job)
+ # We do not continue to process the job if the previous status is not completed
+ return unless Ci::HasStatus::COMPLETED_STATUSES.include?(previous_status)
- # transition status if possible
- Gitlab::OptimisticLocking.retry_lock(processable, name: 'atomic_processing_update_processable') do |subject|
+ Gitlab::OptimisticLocking.retry_lock(job, name: 'atomic_processing_update_job') do |subject|
Ci::ProcessBuildService.new(project, subject.user)
- .execute(subject, status)
+ .execute(subject, previous_status)
- # update internal representation of status
- # to make the status change of processable
- # to be taken into account during further processing
- @collection.set_processable_status(
- processable.id, processable.status, processable.lock_version)
+ # Update the internal representation of the job so that its status change
+ # is taken into account during further processing
+ @collection.set_job_status(job.id, job.status, job.lock_version)
end
end
- def processable_status(processable)
- if processable.scheduling_type_dag?
- # Processable uses DAG, get status of all dependent needs
- @collection.status_for_names(processable.aggregated_needs_names.to_a, dag: true)
+ def status_of_previous_jobs(job)
+ if job.scheduling_type_dag?
+ # job uses DAG, get status of all dependent needs
+ @collection.status_of_jobs(job.aggregated_needs_names.to_a)
else
- # Processable uses Stages, get status of prior stage
- @collection.status_for_prior_stage_position(processable.stage_idx.to_i)
+ # job uses Stages, get status of prior stage
+ @collection.status_of_jobs_prior_to_stage(job.stage_idx.to_i)
end
end
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
index 676c2ecb257..85646b79254 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -8,119 +8,113 @@ module Ci
attr_reader :pipeline
- # We use these columns to perform an efficient
- # calculation of a status
- STATUSES_COLUMNS = [
- :id, :name, :status, :allow_failure,
- :stage_idx, :processed, :lock_version
- ].freeze
-
def initialize(pipeline)
@pipeline = pipeline
- @stage_statuses = {}
- @prior_stage_statuses = {}
+ @stage_jobs = {}
+ @prior_stage_jobs = {}
end
# This method updates internal status for given ID
- def set_processable_status(id, status, lock_version)
- processable = all_statuses_by_id[id]
- return unless processable
+ def set_job_status(id, status, lock_version)
+ job = all_jobs_by_id[id]
+ return unless job
- processable[:status] = status
- processable[:lock_version] = lock_version
+ job[:status] = status
+ job[:lock_version] = lock_version
end
- # This methods gets composite status of all processables
+ # This method gets the composite status of all jobs
def status_of_all
- status_for_array(all_statuses, dag: false)
+ status_for_array(all_jobs)
end
- # This methods gets composite status for processables with given names
- def status_for_names(names, dag:)
- name_statuses = all_statuses_by_name.slice(*names)
+ # This method gets the composite status of the jobs at a given stage
+ def status_of_stage(stage_position)
+ strong_memoize("status_of_stage_#{stage_position}") do
+ stage_jobs = all_jobs_grouped_by_stage_position[stage_position].to_a
- status_for_array(name_statuses.values, dag: dag)
- end
-
- # This methods gets composite status for processables before given stage
- def status_for_prior_stage_position(position)
- strong_memoize("status_for_prior_stage_position_#{position}") do
- stage_statuses = all_statuses_grouped_by_stage_position
- .select { |stage_position, _| stage_position < position }
-
- status_for_array(stage_statuses.values.flatten, dag: false)
+ status_for_array(stage_jobs.flatten)
end
end
- # This methods gets a list of processables for a given stage
- def created_processable_ids_for_stage_position(current_position)
- all_statuses_grouped_by_stage_position[current_position]
- .to_a
- .select { |processable| processable[:status] == 'created' }
- .map { |processable| processable[:id] }
+ # This method gets the composite status of the jobs with the given names
+ def status_of_jobs(names)
+ jobs = all_jobs_by_name.slice(*names)
+
+ status_for_array(jobs.values, dag: true)
end
- # This methods gets composite status for processables at a given stage
- def status_for_stage_position(current_position)
- strong_memoize("status_for_stage_position_#{current_position}") do
- stage_statuses = all_statuses_grouped_by_stage_position[current_position].to_a
+ # This method gets the composite status of the jobs before a given stage
+ def status_of_jobs_prior_to_stage(stage_position)
+ strong_memoize("status_of_jobs_prior_to_stage_#{stage_position}") do
+ stage_jobs = all_jobs_grouped_by_stage_position
+ .select { |position, _| position < stage_position }
- status_for_array(stage_statuses.flatten, dag: false)
+ status_for_array(stage_jobs.values.flatten)
end
end
- # This method returns a list of all processable, that are to be processed
- def processing_processables
- all_statuses.lazy.reject { |status| status[:processed] }
+ # This method gets the IDs of created jobs in a given stage
+ def created_job_ids_in_stage(stage_position)
+ all_jobs_grouped_by_stage_position[stage_position]
+ .to_a
+ .select { |job| job[:status] == 'created' }
+ .map { |job| job[:id] }
+ end
+
+ # This method returns a list of all jobs that still need to be processed
+ def processing_jobs
+ all_jobs.lazy.reject { |job| job[:processed] }
end
private
- def status_for_array(statuses, dag:)
+ # We use these columns to perform an efficient calculation of a status
+ JOB_ATTRS = [
+ :id, :name, :status, :allow_failure,
+ :stage_idx, :processed, :lock_version
+ ].freeze
+
+ def status_for_array(jobs, dag: false)
result = Gitlab::Ci::Status::Composite
- .new(statuses, dag: dag)
+ .new(jobs, dag: dag, project: pipeline.project)
.status
result || 'success'
end
- def all_statuses_grouped_by_stage_position
- strong_memoize(:all_statuses_by_order) do
- all_statuses.group_by { |status| status[:stage_idx].to_i }
+ def all_jobs_grouped_by_stage_position
+ strong_memoize(:all_jobs_by_order) do
+ all_jobs.group_by { |job| job[:stage_idx].to_i }
end
end
- def all_statuses_by_id
- strong_memoize(:all_statuses_by_id) do
- all_statuses.index_by { |row| row[:id] }
+ def all_jobs_by_id
+ strong_memoize(:all_jobs_by_id) do
+ all_jobs.index_by { |row| row[:id] }
end
end
- def all_statuses_by_name
- strong_memoize(:statuses_by_name) do
- all_statuses.index_by { |row| row[:name] }
+ def all_jobs_by_name
+ strong_memoize(:jobs_by_name) do
+ all_jobs.index_by { |row| row[:name] }
end
end
# rubocop: disable CodeReuse/ActiveRecord
- def all_statuses
+ def all_jobs
# We fetch all relevant data in one go.
#
- # This is more efficient than relying
- # on PostgreSQL to calculate composite status
- # for us
+ # This is more efficient than relying on PostgreSQL to calculate composite status for us
#
- # Since we need to reprocess everything
- # we can fetch all of them and do processing
- # ourselves.
- strong_memoize(:all_statuses) do
- raw_statuses = pipeline
- .statuses
- .latest
+ # Since we need to reprocess everything we can fetch all of them and do processing ourselves.
+ strong_memoize(:all_jobs) do
+ raw_jobs = pipeline
+ .current_jobs
.ordered_by_stage
- .pluck(*STATUSES_COLUMNS)
+ .pluck(*JOB_ATTRS)
- raw_statuses.map do |row|
- STATUSES_COLUMNS.zip(row).to_h
+ raw_jobs.map do |row|
+ JOB_ATTRS.zip(row).to_h
end
end
end
diff --git a/app/services/ci/pipeline_schedules/take_ownership_service.rb b/app/services/ci/pipeline_schedules/take_ownership_service.rb
index 9b4001c74bd..b4d193cb875 100644
--- a/app/services/ci/pipeline_schedules/take_ownership_service.rb
+++ b/app/services/ci/pipeline_schedules/take_ownership_service.rb
@@ -23,7 +23,7 @@ module Ci
attr_reader :schedule, :user
def allowed?
- user.can?(:take_ownership_pipeline_schedule, schedule)
+ user.can?(:admin_pipeline_schedule, schedule)
end
def forbidden
diff --git a/app/services/ci/pipelines/add_job_service.rb b/app/services/ci/pipelines/add_job_service.rb
index dfbb37cf0dc..1a5c8d0dccf 100644
--- a/app/services/ci/pipelines/add_job_service.rb
+++ b/app/services/ci/pipelines/add_job_service.rb
@@ -18,6 +18,12 @@ module Ci
in_lock("ci:pipelines:#{pipeline.id}:add-job", ttl: LOCK_TIMEOUT, sleep_sec: LOCK_SLEEP, retries: LOCK_RETRIES) do
Ci::Pipeline.transaction do
+ # This is used to reduce the deadlocks when partitioning `ci_builds`
+ # since inserting into this table requires locks on all foreign keys
+ # and we need to lock all the tables in a specific order for the
+ # migration to succeed.
+ Ci::Pipeline.connection.execute('LOCK "ci_pipelines", "ci_stages" IN ROW SHARE MODE;')
+
yield(job)
job.update_older_statuses_retried!
diff --git a/app/services/ci/process_build_service.rb b/app/services/ci/process_build_service.rb
index a5300cfd29f..afaf18a4de2 100644
--- a/app/services/ci/process_build_service.rb
+++ b/app/services/ci/process_build_service.rb
@@ -2,40 +2,40 @@
module Ci
class ProcessBuildService < BaseService
- def execute(build, current_status)
- if valid_statuses_for_build(build).include?(current_status)
- process(build)
+ def execute(processable, current_status)
+ if valid_statuses_for_processable(processable).include?(current_status)
+ process(processable)
true
else
- build.skip
+ processable.skip
false
end
end
private
- def process(build)
- return enqueue(build) if build.enqueue_immediately?
+ def process(processable)
+ return enqueue(processable) if processable.enqueue_immediately?
- if build.schedulable?
- build.schedule
- elsif build.action?
- build.actionize
+ if processable.schedulable?
+ processable.schedule
+ elsif processable.action?
+ processable.actionize
else
- enqueue(build)
+ enqueue(processable)
end
end
- def enqueue(build)
- return build.drop!(:failed_outdated_deployment_job) if build.outdated_deployment?
+ def enqueue(processable)
+ return processable.drop!(:failed_outdated_deployment_job) if processable.outdated_deployment?
- build.enqueue
+ processable.enqueue
end
- def valid_statuses_for_build(build)
- case build.when
+ def valid_statuses_for_processable(processable)
+ case processable.when
when 'on_success', 'manual', 'delayed'
- build.scheduling_type_dag? ? %w[success] : %w[success skipped]
+ processable.scheduling_type_dag? ? %w[success] : %w[success skipped]
when 'on_failure'
%w[failed]
when 'always'
diff --git a/app/services/ci/queue/build_queue_service.rb b/app/services/ci/queue/build_queue_service.rb
index 2deebc1d725..d6a252df82f 100644
--- a/app/services/ci/queue/build_queue_service.rb
+++ b/app/services/ci/queue/build_queue_service.rb
@@ -34,10 +34,6 @@ module Ci
order(relation)
end
- def builds_queued_before(relation, time)
- relation.queued_before(time)
- end
-
def builds_for_protected_runner(relation)
relation.ref_protected
end
diff --git a/app/services/ci/queue/pending_builds_strategy.rb b/app/services/ci/queue/pending_builds_strategy.rb
index cfafe66d10b..b2929390e58 100644
--- a/app/services/ci/queue/pending_builds_strategy.rb
+++ b/app/services/ci/queue/pending_builds_strategy.rb
@@ -57,9 +57,10 @@ module Ci
# if disaster recovery is enabled, we fallback to FIFO scheduling
relation.order('ci_pending_builds.build_id ASC')
else
- # Implement fair scheduling
- # this returns builds that are ordered by number of running builds
- # we prefer projects that don't use shared runners at all
+ # Implements Fair Scheduling
+ # Builds are ordered by projects that have the fewest running builds.
+ # This keeps projects that create many builds at once from hogging capacity but
+ # has the downside of penalizing projects with lots of builds created in a short period of time
relation
.with(running_builds_for_shared_runners_cte.to_arel)
.joins("LEFT JOIN project_builds ON ci_pending_builds.project_id = project_builds.project_id")
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index 205da2632c2..68ebb376ccd 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -6,7 +6,7 @@ module Ci
class RegisterJobService
include ::Gitlab::Ci::Artifacts::Logger
- attr_reader :runner, :runner_machine, :metrics
+ attr_reader :runner, :runner_manager, :metrics
TEMPORARY_LOCK_TIMEOUT = 3.seconds
@@ -18,9 +18,9 @@ module Ci
# affect 5% of the worst case scenarios.
MAX_QUEUE_DEPTH = 45
- def initialize(runner, runner_machine)
+ def initialize(runner, runner_manager)
@runner = runner
- @runner_machine = runner_machine
+ @runner_manager = runner_manager
@metrics = ::Gitlab::Ci::Queue::Metrics.new(runner)
end
@@ -129,11 +129,6 @@ module Ci
builds = queue.builds_with_any_tags(builds)
end
- # pick builds that older than specified age
- if params.key?(:job_age)
- builds = queue.builds_queued_before(builds, params[:job_age].seconds.ago)
- end
-
build_ids = retrieve_queue(-> { queue.execute(builds) })
@metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
@@ -244,7 +239,6 @@ module Ci
def assign_runner!(build, params)
build.runner_id = runner.id
build.runner_session_attributes = params[:session] if params[:session].present?
- build.ensure_metadata.runner_machine = runner_machine if runner_machine
failure_reason, _ = pre_assign_runner_checks.find { |_, check| check.call(build, params) }
@@ -256,6 +250,7 @@ module Ci
@metrics.increment_queue_operation(:runner_pre_assign_checks_success)
build.run!
+ build.runner_manager = runner_manager if runner_manager
end
!failure_reason
diff --git a/app/services/ci/reset_skipped_jobs_service.rb b/app/services/ci/reset_skipped_jobs_service.rb
index eb809b0162c..cb793eb3e06 100644
--- a/app/services/ci/reset_skipped_jobs_service.rb
+++ b/app/services/ci/reset_skipped_jobs_service.rb
@@ -4,8 +4,10 @@ module Ci
# This service resets skipped jobs so they can be processed again.
# It affects the jobs that depend on the passed in job parameter.
class ResetSkippedJobsService < ::BaseService
- def execute(processable)
- @processable = processable
+ def execute(processables)
+ @processables = Array.wrap(processables)
+ @pipeline = @processables.first.pipeline
+ @processable = @processables.first # Remove with FF `ci_support_reset_skipped_jobs_for_multiple_jobs`
process_subsequent_jobs
reset_source_bridge
@@ -20,13 +22,13 @@ module Ci
end
def reset_source_bridge
- @processable.pipeline.reset_source_bridge!(current_user)
+ @pipeline.reset_source_bridge!(current_user)
end
# rubocop: disable CodeReuse/ActiveRecord
def dependent_jobs
ordered_by_dag(
- @processable.pipeline.processables
+ @pipeline.processables
.from_union(needs_dependent_jobs, stage_dependent_jobs)
.skipped
.ordered_by_stage
@@ -41,13 +43,27 @@ module Ci
end
def stage_dependent_jobs
- @processable.pipeline.processables.after_stage(@processable.stage_idx)
+ if ::Feature.enabled?(:ci_support_reset_skipped_jobs_for_multiple_jobs, project)
+ # Get all jobs after the earliest stage of the given jobs
+ min_stage_idx = @processables.map(&:stage_idx).min
+ @pipeline.processables.after_stage(min_stage_idx)
+ else
+ @pipeline.processables.after_stage(@processable.stage_idx)
+ end
end
def needs_dependent_jobs
- ::Gitlab::Ci::ProcessableObjectHierarchy.new(
- ::Ci::Processable.where(id: @processable.id)
- ).descendants
+ if ::Feature.enabled?(:ci_support_reset_skipped_jobs_for_multiple_jobs, project)
+ # We must include the hierarchy base here because @processables may include both a parent job
+ # and its dependents, and we do not want to exclude those dependents from being processed.
+ ::Gitlab::Ci::ProcessableObjectHierarchy.new(
+ ::Ci::Processable.where(id: @processables.map(&:id))
+ ).base_and_descendants
+ else
+ ::Gitlab::Ci::ProcessableObjectHierarchy.new(
+ ::Ci::Processable.where(id: @processable.id)
+ ).descendants
+ end
end
def ordered_by_dag(jobs)
diff --git a/app/services/ci/runners/create_runner_service.rb b/app/services/ci/runners/create_runner_service.rb
index 2de9ee4d38e..ff4a33e431b 100644
--- a/app/services/ci/runners/create_runner_service.rb
+++ b/app/services/ci/runners/create_runner_service.rb
@@ -5,39 +5,44 @@ module Ci
class CreateRunnerService
RUNNER_CLASS_MAPPING = {
'instance_type' => Ci::Runners::RunnerCreationStrategies::InstanceRunnerStrategy,
- nil => Ci::Runners::RunnerCreationStrategies::InstanceRunnerStrategy
+ 'group_type' => Ci::Runners::RunnerCreationStrategies::GroupRunnerStrategy,
+ 'project_type' => Ci::Runners::RunnerCreationStrategies::ProjectRunnerStrategy
}.freeze
- attr_accessor :user, :type, :params, :strategy
-
- def initialize(user:, type:, params:)
+ def initialize(user:, params:)
@user = user
- @type = type
@params = params
- @strategy = RUNNER_CLASS_MAPPING[type].new(user: user, type: type, params: params)
+ @strategy = RUNNER_CLASS_MAPPING[params[:runner_type]].new(user: user, params: params)
end
def execute
normalize_params
- return ServiceResponse.error(message: 'Validation error') unless strategy.validate_params
- return ServiceResponse.error(message: 'Insufficient permissions') unless strategy.authorized_user?
+ error = strategy.validate_params
+ return ServiceResponse.error(message: error, reason: :validation_error) if error
+
+ unless strategy.authorized_user?
+ return ServiceResponse.error(message: _('Insufficient permissions'), reason: :forbidden)
+ end
runner = ::Ci::Runner.new(params)
return ServiceResponse.success(payload: { runner: runner }) if runner.save
- ServiceResponse.error(message: runner.errors.full_messages)
+ ServiceResponse.error(message: runner.errors.full_messages, reason: :save_error)
end
def normalize_params
params[:registration_type] = :authenticated_user
- params[:runner_type] = type
- params[:active] = !params.delete(:paused) if params[:paused].present?
+ params[:active] = !params.delete(:paused) if params.key?(:paused)
params[:creator] = user
strategy.normalize_params
end
+
+ private
+
+ attr_reader :user, :params, :strategy
end
end
end
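
With the type argument folded into params, callers now pick the creation strategy through params[:runner_type]. A sketch (console-style; user and group are placeholders):

  response = Ci::Runners::CreateRunnerService.new(
    user: current_user,
    params: { runner_type: 'group_type', scope: group, description: 'build runner' }
  ).execute

  if response.success?
    response.payload[:runner]
  else
    [response.reason, response.message]   # :validation_error, :forbidden or :save_error
  end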
diff --git a/app/services/ci/runners/process_runner_version_update_service.rb b/app/services/ci/runners/process_runner_version_update_service.rb
index c8a5e42ccab..5c42a2ab018 100644
--- a/app/services/ci/runners/process_runner_version_update_service.rb
+++ b/app/services/ci/runners/process_runner_version_update_service.rb
@@ -8,6 +8,7 @@ module Ci
end
def execute
+ return ServiceResponse.error(message: 'version update disabled') unless enabled?
return ServiceResponse.error(message: 'version not present') unless @version
_, status = upgrade_check_service.check_runner_upgrade_suggestion(@version)
@@ -22,6 +23,10 @@ module Ci
def upgrade_check_service
@runner_upgrade_check ||= Gitlab::Ci::RunnerUpgradeCheck.new(::Gitlab::VERSION)
end
+
+ def enabled?
+ Gitlab::Ci::RunnerReleases.instance.enabled?
+ end
end
end
end
diff --git a/app/services/ci/runners/register_runner_service.rb b/app/services/ci/runners/register_runner_service.rb
index db16b86d5e6..0c13c32e236 100644
--- a/app/services/ci/runners/register_runner_service.rb
+++ b/app/services/ci/runners/register_runner_service.rb
@@ -3,12 +3,23 @@
module Ci
module Runners
class RegisterRunnerService
- def execute(registration_token, attributes)
- runner_type_attrs = extract_runner_type_attrs(registration_token)
+ include Gitlab::Utils::StrongMemoize
- return ServiceResponse.error(message: 'invalid token supplied', http_status: :forbidden) unless runner_type_attrs
+ def initialize(registration_token, attributes)
+ @registration_token = registration_token
+ @attributes = attributes
+ end
+
+ def execute
+ return ServiceResponse.error(message: 'invalid token supplied', http_status: :forbidden) unless attrs_from_token
+
+ unless registration_token_allowed?(attrs_from_token)
+ return ServiceResponse.error(
+ message: 'runner registration disallowed',
+ reason: :runner_registration_disallowed)
+ end
- runner = ::Ci::Runner.new(attributes.merge(runner_type_attrs))
+ runner = ::Ci::Runner.new(attributes.merge(attrs_from_token))
Ci::BulkInsertableTags.with_bulk_insert_tags do
Ci::Runner.transaction do
@@ -25,32 +36,30 @@ module Ci
private
- def extract_runner_type_attrs(registration_token)
- @attrs_from_token ||= check_token(registration_token)
-
- return unless @attrs_from_token
+ attr_reader :registration_token, :attributes
- attrs = @attrs_from_token.clone
- case attrs[:runner_type]
- when :project_type
- attrs[:projects] = [attrs.delete(:scope)]
- when :group_type
- attrs[:groups] = [attrs.delete(:scope)]
- end
-
- attrs
- end
-
- def check_token(registration_token)
+ def attrs_from_token
if runner_registration_token_valid?(registration_token)
# Create shared runner. Requires admin access
{ runner_type: :instance_type }
elsif runner_registrar_valid?('project') && project = ::Project.find_by_runners_token(registration_token)
# Create a project runner
- { runner_type: :project_type, scope: project }
+ { runner_type: :project_type, projects: [project] }
elsif runner_registrar_valid?('group') && group = ::Group.find_by_runners_token(registration_token)
# Create a group runner
- { runner_type: :group_type, scope: group }
+ { runner_type: :group_type, groups: [group] }
+ end
+ end
+ strong_memoize_attr :attrs_from_token
+
+ def registration_token_allowed?(attrs)
+ case attrs[:runner_type]
+ when :group_type
+ token_scope.allow_runner_registration_token?
+ when :project_type
+ token_scope.namespace.allow_runner_registration_token?
+ else
+ Gitlab::CurrentSettings.allow_runner_registration_token
end
end
@@ -63,7 +72,13 @@ module Ci
end
def token_scope
- @attrs_from_token[:scope]
+ case attrs_from_token[:runner_type]
+ when :project_type
+ attrs_from_token[:projects]&.first
+ when :group_type
+ attrs_from_token[:groups]&.first
+ # No scope for instance type
+ end
end
end
end
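
The registration service now receives its inputs in the constructor, and execute takes no arguments. A sketch (placeholder token and attributes):

  service  = Ci::Runners::RegisterRunnerService.new(registration_token, { description: 'shared runner' })
  response = service.execute

  if response.error? && response.reason == :runner_registration_disallowed
    # registration tokens are disabled for the resolved project, group or instance scope
  end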
diff --git a/app/services/ci/runners/runner_creation_strategies/group_runner_strategy.rb b/app/services/ci/runners/runner_creation_strategies/group_runner_strategy.rb
new file mode 100644
index 00000000000..2eae5069046
--- /dev/null
+++ b/app/services/ci/runners/runner_creation_strategies/group_runner_strategy.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Ci
+ module Runners
+ module RunnerCreationStrategies
+ class GroupRunnerStrategy
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(user:, params:)
+ @user = user
+ @params = params
+ end
+
+ def normalize_params
+ params[:runner_type] = 'group_type'
+ params[:groups] = [scope]
+ end
+
+ def validate_params
+ _('Missing/invalid scope') unless scope.present?
+ end
+
+ def authorized_user?
+ user.present? && user.can?(:create_runner, scope)
+ end
+
+ private
+
+ attr_reader :user, :params
+
+ def scope
+ params.delete(:scope)
+ end
+ strong_memoize_attr :scope
+ end
+ end
+ end
+end
diff --git a/app/services/ci/runners/runner_creation_strategies/instance_runner_strategy.rb b/app/services/ci/runners/runner_creation_strategies/instance_runner_strategy.rb
index f195c3e88f9..39719ad806f 100644
--- a/app/services/ci/runners/runner_creation_strategies/instance_runner_strategy.rb
+++ b/app/services/ci/runners/runner_creation_strategies/instance_runner_strategy.rb
@@ -4,25 +4,26 @@ module Ci
module Runners
module RunnerCreationStrategies
class InstanceRunnerStrategy
- attr_accessor :user, :type, :params
-
- def initialize(user:, type:, params:)
+ def initialize(user:, params:)
@user = user
- @type = type
@params = params
end
def normalize_params
- params[:runner_type] = :instance_type
+ params[:runner_type] = 'instance_type'
end
def validate_params
- true
+ _('Unexpected scope') if params[:scope]
end
def authorized_user?
- user.present? && user.can?(:create_instance_runners)
+ user.present? && user.can?(:create_instance_runner)
end
+
+ private
+
+ attr_reader :user, :params
end
end
end
diff --git a/app/services/ci/runners/runner_creation_strategies/project_runner_strategy.rb b/app/services/ci/runners/runner_creation_strategies/project_runner_strategy.rb
new file mode 100644
index 00000000000..487da996513
--- /dev/null
+++ b/app/services/ci/runners/runner_creation_strategies/project_runner_strategy.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Ci
+ module Runners
+ module RunnerCreationStrategies
+ class ProjectRunnerStrategy
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(user:, params:)
+ @user = user
+ @params = params
+ end
+
+ def normalize_params
+ params[:runner_type] = 'project_type'
+ params[:projects] = [scope]
+ end
+
+ def validate_params
+ _('Missing/invalid scope') unless scope.present?
+ end
+
+ def authorized_user?
+ user.present? && user.can?(:create_runner, scope)
+ end
+
+ private
+
+ attr_reader :user, :params
+
+ def scope
+ params.delete(:scope)
+ end
+ strong_memoize_attr :scope
+ end
+ end
+ end
+end
diff --git a/app/services/ci/runners/stale_machines_cleanup_service.rb b/app/services/ci/runners/stale_managers_cleanup_service.rb
index 3e5706d24a6..b39f7315bc6 100644
--- a/app/services/ci/runners/stale_machines_cleanup_service.rb
+++ b/app/services/ci/runners/stale_managers_cleanup_service.rb
@@ -2,25 +2,25 @@
module Ci
module Runners
- class StaleMachinesCleanupService
+ class StaleManagersCleanupService
MAX_DELETIONS = 1000
def execute
ServiceResponse.success(payload: {
# the `stale` relationship can return duplicates, so we don't try to return a precise count here
- deleted_machines: delete_stale_runner_machines > 0
+ deleted_managers: delete_stale_runner_managers > 0
})
end
private
- def delete_stale_runner_machines
+ def delete_stale_runner_managers
total_deleted_count = 0
loop do
sub_batch_limit = [100, MAX_DELETIONS].min
# delete_all discards part of the `stale` scope query, so we explicitly wrap it with a SELECT as a workaround
- deleted_count = Ci::RunnerMachine.id_in(Ci::RunnerMachine.stale.limit(sub_batch_limit)).delete_all
+ deleted_count = Ci::RunnerManager.id_in(Ci::RunnerManager.stale.limit(sub_batch_limit)).delete_all
total_deleted_count += deleted_count
break if deleted_count == 0 || total_deleted_count >= MAX_DELETIONS
diff --git a/app/services/ci/runners/unregister_runner_manager_service.rb b/app/services/ci/runners/unregister_runner_manager_service.rb
new file mode 100644
index 00000000000..ecf6aba09c7
--- /dev/null
+++ b/app/services/ci/runners/unregister_runner_manager_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Ci
+ module Runners
+ class UnregisterRunnerManagerService
+ attr_reader :runner, :author, :system_id
+
+ # @param [Ci::Runner] runner the runner to unregister/destroy
+ # @param [User, authentication token String] author the user or the authentication token authorizing the removal
+ # @param [String] system_id ID of the system being unregistered
+ def initialize(runner, author, system_id:)
+ @runner = runner
+ @author = author
+ @system_id = system_id
+ end
+
+ def execute
+ return system_id_missing_error if system_id.blank?
+
+ runner_manager = runner.runner_managers.find_by_system_xid!(system_id)
+ runner_manager.destroy!
+
+ ServiceResponse.success
+ end
+
+ private
+
+ def system_id_missing_error
+ ServiceResponse.error(message: '`system_id` needs to be specified for runners created in the UI.')
+ end
+ end
+ end
+end
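
Usage sketch for the new Ci::Runners::UnregisterRunnerManagerService (placeholders for the runner and author; system_id identifies the manager being removed):

  response = Ci::Runners::UnregisterRunnerManagerService
    .new(runner, current_user, system_id: 'runner-host-01')
    .execute

  response.success?   # false, with an explanatory message, when system_id is blank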
diff --git a/app/services/ci/runners/unregister_runner_service.rb b/app/services/ci/runners/unregister_runner_service.rb
index 742b21f77df..d186bd421d5 100644
--- a/app/services/ci/runners/unregister_runner_service.rb
+++ b/app/services/ci/runners/unregister_runner_service.rb
@@ -13,7 +13,8 @@ module Ci
end
def execute
- @runner&.destroy
+ runner.destroy!
+
ServiceResponse.success
end
end
diff --git a/app/services/ci/stuck_builds/drop_helpers.rb b/app/services/ci/stuck_builds/drop_helpers.rb
index f56c9aaeb55..4ce30a6068c 100644
--- a/app/services/ci/stuck_builds/drop_helpers.rb
+++ b/app/services/ci/stuck_builds/drop_helpers.rb
@@ -45,23 +45,26 @@ module Ci
end
def track_exception_for_build(ex, build)
- Gitlab::ErrorTracking.track_exception(ex,
- build_id: build.id,
- build_name: build.name,
- build_stage: build.stage_name,
- pipeline_id: build.pipeline_id,
- project_id: build.project_id
+ Gitlab::ErrorTracking.track_exception(
+ ex,
+ build_id: build.id,
+ build_name: build.name,
+ build_stage: build.stage_name,
+ pipeline_id: build.pipeline_id,
+ project_id: build.project_id
)
end
def log_dropping_message(type, build, reason)
- Gitlab::AppLogger.info(class: self.class.name,
- message: "Dropping #{type} build",
- build_stuck_type: type,
- build_id: build.id,
- runner_id: build.runner_id,
- build_status: build.status,
- build_failure_reason: reason)
+ Gitlab::AppLogger.info(
+ class: self.class.name,
+ message: "Dropping #{type} build",
+ build_stuck_type: type,
+ build_id: build.id,
+ runner_id: build.runner_id,
+ build_status: build.status,
+ build_failure_reason: reason
+ )
end
end
end
diff --git a/app/services/ci/track_failed_build_service.rb b/app/services/ci/track_failed_build_service.rb
index 973c43a9445..cd7d548e102 100644
--- a/app/services/ci/track_failed_build_service.rb
+++ b/app/services/ci/track_failed_build_service.rb
@@ -6,7 +6,7 @@
# @param exit_code [Int] the resulting exit code.
module Ci
class TrackFailedBuildService
- SCHEMA_URL = 'iglu:com.gitlab/ci_build_failed/jsonschema/1-0-1'
+ SCHEMA_URL = 'iglu:com.gitlab/ci_build_failed/jsonschema/1-0-2'
def initialize(build:, exit_code:, failure_reason:)
@build = build
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index 58927a90b6e..40941dd4cd0 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -37,7 +37,7 @@ module Ci
end
##
- # Force recemove build from the queue, without checking a transition state
+ # Force remove build from the queue, without checking a transition state
#
def remove!(build)
removed = build.all_queuing_entries.delete_all
diff --git a/app/services/clusters/agent_tokens/create_service.rb b/app/services/clusters/agent_tokens/create_service.rb
index 2539ffdc5ba..66a3cb04d98 100644
--- a/app/services/clusters/agent_tokens/create_service.rb
+++ b/app/services/clusters/agent_tokens/create_service.rb
@@ -2,16 +2,24 @@
module Clusters
module AgentTokens
- class CreateService < ::BaseContainerService
+ class CreateService
ALLOWED_PARAMS = %i[agent_id description name].freeze
+ attr_reader :agent, :current_user, :params
+
+ def initialize(agent:, current_user:, params:)
+ @agent = agent
+ @current_user = current_user
+ @params = params
+ end
+
def execute
- return error_no_permissions unless current_user.can?(:create_cluster, container)
+ return error_no_permissions unless current_user.can?(:create_cluster, agent.project)
- token = ::Clusters::AgentToken.new(filtered_params.merge(created_by_user: current_user))
+ token = ::Clusters::AgentToken.new(filtered_params.merge(agent_id: agent.id, created_by_user: current_user))
if token.save
- log_activity_event!(token)
+ log_activity_event(token)
ServiceResponse.success(payload: { secret: token.token, token: token })
else
@@ -29,7 +37,7 @@ module Clusters
params.slice(*ALLOWED_PARAMS)
end
- def log_activity_event!(token)
+ def log_activity_event(token)
Clusters::Agents::CreateActivityEventService.new(
token.agent,
kind: :token_created,
@@ -42,3 +50,5 @@ module Clusters
end
end
end
+
+Clusters::AgentTokens::CreateService.prepend_mod
diff --git a/app/services/clusters/agent_tokens/revoke_service.rb b/app/services/clusters/agent_tokens/revoke_service.rb
new file mode 100644
index 00000000000..5d89b405969
--- /dev/null
+++ b/app/services/clusters/agent_tokens/revoke_service.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Clusters
+ module AgentTokens
+ class RevokeService
+ attr_reader :current_project, :current_user, :token
+
+ def initialize(token:, current_user:)
+ @token = token
+ @current_user = current_user
+ end
+
+ def execute
+ return error_no_permissions unless current_user.can?(:create_cluster, token.agent.project)
+
+ if token.update(status: token.class.statuses[:revoked])
+ log_activity_event(token)
+
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: token.errors.full_messages)
+ end
+ end
+
+ private
+
+ def error_no_permissions
+ ServiceResponse.error(
+ message: s_('ClusterAgent|User has insufficient permissions to revoke the token for this project'))
+ end
+
+ def log_activity_event(token)
+ Clusters::Agents::CreateActivityEventService.new(
+ token.agent,
+ kind: :token_revoked,
+ level: :info,
+ recorded_at: token.updated_at,
+ user: current_user,
+ agent_token: token
+ ).execute
+ end
+ end
+ end
+end
+
+Clusters::AgentTokens::RevokeService.prepend_mod
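
Usage sketch for the new Clusters::AgentTokens::RevokeService (placeholders; the caller needs the :create_cluster ability on the agent's project):

  response = Clusters::AgentTokens::RevokeService
    .new(token: agent_token, current_user: current_user)
    .execute

  response.success?   # token status set to :revoked and a :token_revoked activity event recorded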
diff --git a/app/services/clusters/agents/authorizations/ci_access/filter_service.rb b/app/services/clusters/agents/authorizations/ci_access/filter_service.rb
new file mode 100644
index 00000000000..cd08aaa12d4
--- /dev/null
+++ b/app/services/clusters/agents/authorizations/ci_access/filter_service.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ module Authorizations
+ module CiAccess
+ class FilterService
+ def initialize(authorizations, filter_params)
+ @authorizations = authorizations
+ @filter_params = filter_params
+
+ @environments_matcher = {}
+ end
+
+ def execute
+ filter_by_environment(authorizations)
+ end
+
+ private
+
+ attr_reader :authorizations, :filter_params
+
+ def filter_by_environment(auths)
+ return auths unless filter_by_environment?
+
+ auths.select do |auth|
+ next true if auth.config['environments'].blank?
+
+ auth.config['environments'].any? { |environment_pattern| matches_environment?(environment_pattern) }
+ end
+ end
+
+ def filter_by_environment?
+ filter_params.has_key?(:environment)
+ end
+
+ def environment_filter
+ @environment_filter ||= filter_params[:environment]
+ end
+
+ def matches_environment?(environment_pattern)
+ return false if environment_filter.nil?
+
+ environments_matcher(environment_pattern).match?(environment_filter)
+ end
+
+ def environments_matcher(environment_pattern)
+ @environments_matcher[environment_pattern] ||= ::Gitlab::Ci::EnvironmentMatcher.new(environment_pattern)
+ end
+ end
+ end
+ end
+ end
+end
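
Usage sketch for the relocated filter service (same behaviour as the removed Clusters::Agents::FilterAuthorizationsService; the authorizations and environment name are placeholders):

  filtered = Clusters::Agents::Authorizations::CiAccess::FilterService
    .new(pipeline.cluster_agent_authorizations, environment: 'production')
    .execute

  # Authorizations whose config lists no environments always pass the filter;
  # otherwise at least one configured environment pattern must match 'production'.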
diff --git a/app/services/clusters/agents/authorizations/ci_access/refresh_service.rb b/app/services/clusters/agents/authorizations/ci_access/refresh_service.rb
new file mode 100644
index 00000000000..047a0725a2c
--- /dev/null
+++ b/app/services/clusters/agents/authorizations/ci_access/refresh_service.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ module Authorizations
+ module CiAccess
+ class RefreshService
+ include Gitlab::Utils::StrongMemoize
+
+ AUTHORIZED_ENTITY_LIMIT = 100
+
+ delegate :project, to: :agent, private: true
+ delegate :root_ancestor, to: :project, private: true
+
+ def initialize(agent, config:)
+ @agent = agent
+ @config = config
+ end
+
+ def execute
+ refresh_projects!
+ refresh_groups!
+
+ true
+ end
+
+ private
+
+ attr_reader :agent, :config
+
+ def refresh_projects!
+ if allowed_project_configurations.present?
+ project_ids = allowed_project_configurations.map { |config| config.fetch(:project_id) }
+
+ agent.with_lock do
+ agent.ci_access_project_authorizations.upsert_all(allowed_project_configurations, unique_by: [:agent_id, :project_id])
+ agent.ci_access_project_authorizations.where.not(project_id: project_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
+ end
+ else
+ agent.ci_access_project_authorizations.delete_all(:delete_all)
+ end
+ end
+
+ def refresh_groups!
+ if allowed_group_configurations.present?
+ group_ids = allowed_group_configurations.map { |config| config.fetch(:group_id) }
+
+ agent.with_lock do
+ agent.ci_access_group_authorizations.upsert_all(allowed_group_configurations, unique_by: [:agent_id, :group_id])
+ agent.ci_access_group_authorizations.where.not(group_id: group_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
+ end
+ else
+ agent.ci_access_group_authorizations.delete_all(:delete_all)
+ end
+ end
+
+ def allowed_project_configurations
+ strong_memoize(:allowed_project_configurations) do
+ project_entries = extract_config_entries(entity: 'projects')
+
+ if project_entries
+ allowed_projects.where_full_path_in(project_entries.keys).map do |project|
+ { project_id: project.id, config: project_entries[project.full_path.downcase] }
+ end
+ end
+ end
+ end
+
+ def allowed_group_configurations
+ strong_memoize(:allowed_group_configurations) do
+ group_entries = extract_config_entries(entity: 'groups')
+
+ if group_entries
+ allowed_groups.where_full_path_in(group_entries.keys).map do |group|
+ { group_id: group.id, config: group_entries[group.full_path.downcase] }
+ end
+ end
+ end
+ end
+
+ def extract_config_entries(entity:)
+ config.dig('ci_access', entity)
+ &.first(AUTHORIZED_ENTITY_LIMIT)
+ &.index_by { |config| config.delete('id').downcase }
+ end
+
+ def allowed_projects
+ root_ancestor.all_projects
+ end
+
+ def allowed_groups
+ if group_root_ancestor?
+ root_ancestor.self_and_descendants
+ else
+ ::Group.none
+ end
+ end
+
+ def group_root_ancestor?
+ root_ancestor.group_namespace?
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/clusters/agents/authorizations/user_access/refresh_service.rb b/app/services/clusters/agents/authorizations/user_access/refresh_service.rb
new file mode 100644
index 00000000000..04d6e04c54d
--- /dev/null
+++ b/app/services/clusters/agents/authorizations/user_access/refresh_service.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ module Authorizations
+ module UserAccess
+ class RefreshService
+ include Gitlab::Utils::StrongMemoize
+
+ AUTHORIZED_ENTITY_LIMIT = 100
+
+ delegate :project, to: :agent, private: true
+ delegate :root_ancestor, to: :project, private: true
+
+ def initialize(agent, config:)
+ @agent = agent
+ @config = config
+ end
+
+ def execute
+ refresh_projects!
+ refresh_groups!
+
+ true
+ end
+
+ private
+
+ attr_reader :agent, :config
+
+ def refresh_projects!
+ if allowed_project_configurations.present?
+ project_ids = allowed_project_configurations.map { |config| config.fetch(:project_id) }
+
+ agent.with_lock do
+ agent.user_access_project_authorizations.upsert_configs(allowed_project_configurations)
+ agent.user_access_project_authorizations.delete_unlisted(project_ids)
+ end
+ else
+ agent.user_access_project_authorizations.delete_all(:delete_all)
+ end
+ end
+
+ def refresh_groups!
+ if allowed_group_configurations.present?
+ group_ids = allowed_group_configurations.map { |config| config.fetch(:group_id) }
+
+ agent.with_lock do
+ agent.user_access_group_authorizations.upsert_configs(allowed_group_configurations)
+ agent.user_access_group_authorizations.delete_unlisted(group_ids)
+ end
+ else
+ agent.user_access_group_authorizations.delete_all(:delete_all)
+ end
+ end
+
+ def allowed_project_configurations
+ project_entries = extract_config_entries(entity: 'projects')
+
+ return unless project_entries
+
+ allowed_projects.where_full_path_in(project_entries.keys).map do |project|
+ { project_id: project.id, config: user_access_as }
+ end
+ end
+ strong_memoize_attr :allowed_project_configurations
+
+ def allowed_group_configurations
+ group_entries = extract_config_entries(entity: 'groups')
+
+ return unless group_entries
+
+ allowed_groups.where_full_path_in(group_entries.keys).map do |group|
+ { group_id: group.id, config: user_access_as }
+ end
+ end
+ strong_memoize_attr :allowed_group_configurations
+
+ def extract_config_entries(entity:)
+ config.dig('user_access', entity)
+ &.first(AUTHORIZED_ENTITY_LIMIT)
+ &.index_by { |config| config.delete('id').downcase }
+ end
+
+ def allowed_projects
+ root_ancestor.all_projects
+ end
+
+ def allowed_groups
+ if group_root_ancestor?
+ root_ancestor.self_and_descendants
+ else
+ ::Group.none
+ end
+ end
+
+ def group_root_ancestor?
+ root_ancestor.group_namespace?
+ end
+
+ def user_access_as
+ @user_access_as ||= config['user_access']&.slice('access_as') || {}
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/clusters/agents/authorize_proxy_user_service.rb b/app/services/clusters/agents/authorize_proxy_user_service.rb
new file mode 100644
index 00000000000..fbcf25153c1
--- /dev/null
+++ b/app/services/clusters/agents/authorize_proxy_user_service.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ class AuthorizeProxyUserService < ::BaseService
+ include ::Gitlab::Utils::StrongMemoize
+
+ def initialize(current_user, agent)
+ @current_user = current_user
+ @agent = agent
+ end
+
+ def execute
+ return forbidden unless user_access_config.present?
+
+ access_as = user_access_config['access_as']
+ return forbidden unless access_as.present?
+ return forbidden if access_as.size != 1
+
+ if payload = handle_access(access_as)
+ return success(payload: payload)
+ end
+
+ forbidden
+ end
+
+ private
+
+ attr_reader :current_user, :agent
+
+ # Override in EE
+ def handle_access(access_as)
+ access_as_agent if access_as.key?('agent')
+ end
+
+ def authorizations
+ @authorizations ||= ::Clusters::Agents::Authorizations::UserAccess::Finder
+ .new(current_user, agent: agent).execute
+ end
+
+ def response_base
+ {
+ agent: {
+ id: agent.id,
+ config_project: { id: agent.project_id }
+ },
+ user: {
+ id: current_user.id,
+ username: current_user.username
+ }
+ }
+ end
+
+ def access_as_agent
+ return if authorizations.empty?
+
+ response_base.merge(access_as: { agent: {} })
+ end
+
+ def user_access_config
+ agent.user_access_config
+ end
+ strong_memoize_attr :user_access_config
+
+ delegate :success, to: ServiceResponse, private: true
+
+ def forbidden
+ ServiceResponse.error(reason: :forbidden, message: '403 Forbidden')
+ end
+ end
+ end
+end
+
+Clusters::Agents::AuthorizeProxyUserService.prepend_mod
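
A short sketch of consuming the ServiceResponse this service returns (hypothetical caller with `user` and `agent` already loaded; not part of this commit):

    response = Clusters::Agents::AuthorizeProxyUserService.new(user, agent).execute

    if response.success?
      # payload carries agent/user identifiers plus the resolved access mode
      response.payload[:access_as] # => { agent: {} }
    else
      response.reason  # => :forbidden
      response.message # => "403 Forbidden"
    end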
diff --git a/app/services/clusters/agents/create_activity_event_service.rb b/app/services/clusters/agents/create_activity_event_service.rb
index 886dddf1a52..87554f0e495 100644
--- a/app/services/clusters/agents/create_activity_event_service.rb
+++ b/app/services/clusters/agents/create_activity_event_service.rb
@@ -14,6 +14,10 @@ module Clusters
DeleteExpiredEventsWorker.perform_at(schedule_cleanup_at, agent.id)
ServiceResponse.success
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_exception(e, agent_id: agent.id)
+
+ ServiceResponse.error(message: e.message)
end
private
diff --git a/app/services/clusters/agents/filter_authorizations_service.rb b/app/services/clusters/agents/filter_authorizations_service.rb
deleted file mode 100644
index 68517ceec04..00000000000
--- a/app/services/clusters/agents/filter_authorizations_service.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Agents
- class FilterAuthorizationsService
- def initialize(authorizations, filter_params)
- @authorizations = authorizations
- @filter_params = filter_params
-
- @environments_matcher = {}
- end
-
- def execute
- filter_by_environment(authorizations)
- end
-
- private
-
- attr_reader :authorizations, :filter_params
-
- def filter_by_environment(auths)
- return auths unless filter_by_environment?
-
- auths.select do |auth|
- next true if auth.config['environments'].blank?
-
- auth.config['environments'].any? { |environment_pattern| matches_environment?(environment_pattern) }
- end
- end
-
- def filter_by_environment?
- filter_params.has_key?(:environment)
- end
-
- def environment_filter
- @environment_filter ||= filter_params[:environment]
- end
-
- def matches_environment?(environment_pattern)
- return false if environment_filter.nil?
-
- environments_matcher(environment_pattern).match?(environment_filter)
- end
-
- def environments_matcher(environment_pattern)
- @environments_matcher[environment_pattern] ||= ::Gitlab::Ci::EnvironmentMatcher.new(environment_pattern)
- end
- end
- end
-end
diff --git a/app/services/clusters/agents/refresh_authorization_service.rb b/app/services/clusters/agents/refresh_authorization_service.rb
deleted file mode 100644
index 23ececef6a1..00000000000
--- a/app/services/clusters/agents/refresh_authorization_service.rb
+++ /dev/null
@@ -1,102 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Agents
- class RefreshAuthorizationService
- include Gitlab::Utils::StrongMemoize
-
- AUTHORIZED_ENTITY_LIMIT = 100
-
- delegate :project, to: :agent, private: true
- delegate :root_ancestor, to: :project, private: true
-
- def initialize(agent, config:)
- @agent = agent
- @config = config
- end
-
- def execute
- refresh_projects!
- refresh_groups!
-
- true
- end
-
- private
-
- attr_reader :agent, :config
-
- def refresh_projects!
- if allowed_project_configurations.present?
- project_ids = allowed_project_configurations.map { |config| config.fetch(:project_id) }
-
- agent.with_lock do
- agent.project_authorizations.upsert_all(allowed_project_configurations, unique_by: [:agent_id, :project_id])
- agent.project_authorizations.where.not(project_id: project_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
- end
- else
- agent.project_authorizations.delete_all(:delete_all)
- end
- end
-
- def refresh_groups!
- if allowed_group_configurations.present?
- group_ids = allowed_group_configurations.map { |config| config.fetch(:group_id) }
-
- agent.with_lock do
- agent.group_authorizations.upsert_all(allowed_group_configurations, unique_by: [:agent_id, :group_id])
- agent.group_authorizations.where.not(group_id: group_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
- end
- else
- agent.group_authorizations.delete_all(:delete_all)
- end
- end
-
- def allowed_project_configurations
- strong_memoize(:allowed_project_configurations) do
- project_entries = extract_config_entries(entity: 'projects')
-
- if project_entries
- allowed_projects.where_full_path_in(project_entries.keys).map do |project|
- { project_id: project.id, config: project_entries[project.full_path.downcase] }
- end
- end
- end
- end
-
- def allowed_group_configurations
- strong_memoize(:allowed_group_configurations) do
- group_entries = extract_config_entries(entity: 'groups')
-
- if group_entries
- allowed_groups.where_full_path_in(group_entries.keys).map do |group|
- { group_id: group.id, config: group_entries[group.full_path.downcase] }
- end
- end
- end
- end
-
- def extract_config_entries(entity:)
- config.dig('ci_access', entity)
- &.first(AUTHORIZED_ENTITY_LIMIT)
- &.index_by { |config| config.delete('id').downcase }
- end
-
- def allowed_projects
- root_ancestor.all_projects
- end
-
- def allowed_groups
- if group_root_ancestor?
- root_ancestor.self_and_descendants
- else
- ::Group.none
- end
- end
-
- def group_root_ancestor?
- root_ancestor.group_namespace?
- end
- end
- end
-end
diff --git a/app/services/clusters/applications/base_helm_service.rb b/app/services/clusters/applications/base_helm_service.rb
deleted file mode 100644
index 0c9b41be8d2..00000000000
--- a/app/services/clusters/applications/base_helm_service.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Applications
- class BaseHelmService
- attr_accessor :app
-
- def initialize(app)
- @app = app
- end
-
- protected
-
- def log_error(error)
- meta = {
- error_code: error.respond_to?(:error_code) ? error.error_code : nil,
- service: self.class.name,
- app_id: app.id,
- app_name: app.name,
- project_ids: app.cluster.project_ids,
- group_ids: app.cluster.group_ids
- }
-
- Gitlab::ErrorTracking.track_exception(error, meta)
- end
-
- def log_event(event)
- meta = {
- service: self.class.name,
- app_id: app.id,
- app_name: app.name,
- project_ids: app.cluster.project_ids,
- group_ids: app.cluster.group_ids,
- event: event
- }
-
- logger.info(meta)
- end
-
- def logger
- @logger ||= Gitlab::Kubernetes::Logger.build
- end
-
- def cluster
- app.cluster
- end
-
- def kubeclient
- cluster.kubeclient
- end
-
- def helm_api
- @helm_api ||= Gitlab::Kubernetes::Helm::API.new(kubeclient)
- end
-
- def install_command
- @install_command ||= app.install_command
- end
-
- def update_command
- @update_command ||= app.update_command
- end
-
- def patch_command(new_values = "")
- app.patch_command(new_values)
- end
- end
- end
-end
diff --git a/app/services/commits/change_service.rb b/app/services/commits/change_service.rb
index dc7f84ab807..0b97aae9972 100644
--- a/app/services/commits/change_service.rb
+++ b/app/services/commits/change_service.rb
@@ -29,12 +29,24 @@ module Commits
dry_run: @dry_run
)
rescue Gitlab::Git::Repository::CreateTreeError => ex
- act = action.to_s.dasherize
type = @commit.change_type_title(current_user)
- error_msg = "Sorry, we cannot #{act} this #{type} automatically. " \
- "This #{type} may already have been #{act}ed, or a more recent " \
- "commit may have updated some of its content."
+ status = case [type, action]
+ when ['commit', :cherry_pick]
+ s_("MergeRequests|Commit cherry-pick failed")
+ when ['commit', :revert]
+ s_("MergeRequests|Commit revert failed")
+ when ['merge request', :cherry_pick]
+ s_("MergeRequests|Merge request cherry-pick failed")
+ when ['merge request', :revert]
+ s_("MergeRequests|Merge request revert failed")
+ end
+
+ detail = s_("MergeRequests|Can't perform this action automatically. " \
+ "It may have already been done, or a more recent commit may have updated some of this content. " \
+ "Please perform this action locally.")
+
+ error_msg = "#{status}: #{detail}"
raise ChangeError.new(error_msg, ex.error_code)
end
diff --git a/app/services/concerns/exclusive_lease_guard.rb b/app/services/concerns/exclusive_lease_guard.rb
index 76d59cf2159..74acaa0522a 100644
--- a/app/services/concerns/exclusive_lease_guard.rb
+++ b/app/services/concerns/exclusive_lease_guard.rb
@@ -21,7 +21,7 @@ module ExclusiveLeaseGuard
lease = exclusive_lease.try_obtain
unless lease
- log_error("Cannot obtain an exclusive lease for #{lease_key}. There must be another instance already in execution.")
+ log_lease_taken
return
end
@@ -57,7 +57,23 @@ module ExclusiveLeaseGuard
exclusive_lease.renew
end
- def log_error(message, extra_args = {})
- Gitlab::AppLogger.error(message)
+ def log_lease_taken
+ logger = Gitlab::AppJsonLogger
+ args = { message: lease_taken_message, lease_key: lease_key, class_name: self.class.name, lease_timeout: lease_timeout }
+
+ case lease_taken_log_level
+ when :debug then logger.debug(args)
+ when :info then logger.info(args)
+ when :warn then logger.warn(args)
+ else logger.error(args)
+ end
+ end
+
+ def lease_taken_message
+ "Cannot obtain an exclusive lease. There must be another instance already in execution."
+ end
+
+ def lease_taken_log_level
+ :error
end
end
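
The new hooks let an including class tune how a taken lease is logged. A rough sketch of an includer overriding them (class name and timeout are illustrative; `try_obtain_lease` and `lease_timeout` are the concern's existing contract):

    class NightlyCleanupService
      include ExclusiveLeaseGuard

      def execute
        try_obtain_lease do
          # work that must not run concurrently
        end
      end

      private

      def lease_timeout
        30.minutes
      end

      # Overlapping runs are expected and harmless here, so log at :info instead of :error.
      def lease_taken_log_level
        :info
      end
    end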
diff --git a/app/services/concerns/incident_management/usage_data.rb b/app/services/concerns/incident_management/usage_data.rb
index 40183085344..f7edbb80d09 100644
--- a/app/services/concerns/incident_management/usage_data.rb
+++ b/app/services/concerns/incident_management/usage_data.rb
@@ -5,7 +5,7 @@ module IncidentManagement
include Gitlab::Utils::UsageData
def track_incident_action(current_user, target, action)
- return unless target.incident?
+ return unless target.incident_type_issue?
event = "incident_management_#{action}"
track_usage_event(event, current_user.id)
@@ -13,8 +13,6 @@ module IncidentManagement
namespace = target.try(:namespace)
project = target.try(:project)
- return unless Feature.enabled?(:route_hll_to_snowplow_phase2, target.try(:namespace))
-
Gitlab::Tracking.event(
self.class.to_s,
event,
diff --git a/app/services/concerns/issues/resolve_discussions.rb b/app/services/concerns/issues/resolve_discussions.rb
index f0e9862ca30..5e87f610e4e 100644
--- a/app/services/concerns/issues/resolve_discussions.rb
+++ b/app/services/concerns/issues/resolve_discussions.rb
@@ -16,7 +16,11 @@ module Issues
# rubocop: disable CodeReuse/ActiveRecord
def merge_request_to_resolve_discussions_of
strong_memoize(:merge_request_to_resolve_discussions_of) do
- MergeRequestsFinder.new(current_user, project_id: project.id)
+        # Sometimes this will be a Group, when a work item is created at the group level.
+        # It is unclear whether we will need to handle resolving an MR's discussions from a group-level issue.
+ next unless container.is_a?(Project)
+
+ MergeRequestsFinder.new(current_user, project_id: container.id)
.find_by(iid: merge_request_to_resolve_discussions_of_iid)
end
end
diff --git a/app/services/concerns/update_repository_storage_methods.rb b/app/services/concerns/update_repository_storage_methods.rb
index b21d05f4178..a0b4040cff7 100644
--- a/app/services/concerns/update_repository_storage_methods.rb
+++ b/app/services/concerns/update_repository_storage_methods.rb
@@ -28,10 +28,7 @@ module UpdateRepositoryStorageMethods
track_repository(destination_storage_name)
end
- unless same_filesystem?
- remove_old_paths
- enqueue_housekeeping
- end
+ remove_old_paths unless same_filesystem?
repository_storage_move.finish_cleanup!
@@ -95,10 +92,6 @@ module UpdateRepositoryStorageMethods
end
end
- def enqueue_housekeeping
- # no-op
- end
-
def wait_for_pushes(type)
reference_counter = container.reference_counter(type: type)
diff --git a/app/services/concerns/work_items/widgetable_service.rb b/app/services/concerns/work_items/widgetable_service.rb
index 24ade9336b2..9d1132b1aba 100644
--- a/app/services/concerns/work_items/widgetable_service.rb
+++ b/app/services/concerns/work_items/widgetable_service.rb
@@ -2,9 +2,29 @@
module WorkItems
module WidgetableService
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def initialize_callbacks!(work_item)
+ @callbacks = work_item.widgets.filter_map do |widget|
+ callback_class = widget.class.try(:callback_class)
+ callback_params = @widget_params[widget.class.api_symbol]
+
+ if new_type_excludes_widget?(widget)
+ callback_params = {} if callback_params.nil?
+ callback_params[:excluded_in_new_type] = true
+ end
+
+ next if callback_class.nil? || callback_params.blank?
+
+ callback_class.new(issuable: work_item, current_user: current_user, params: callback_params)
+ end
+
+ @callbacks.each(&:after_initialize)
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
def execute_widgets(work_item:, callback:, widget_params: {}, service_params: {})
work_item.widgets.each do |widget|
- widget_service(widget, service_params).try(callback, params: widget_params[widget.class.api_symbol])
+ widget_service(widget, service_params).try(callback, params: widget_params[widget.class.api_symbol] || {})
end
end
@@ -26,5 +46,13 @@ module WorkItems
rescue NameError
nil
end
+
+ private
+
+ def new_type_excludes_widget?(widget)
+ return false unless params[:work_item_type]
+
+ params[:work_item_type].widgets.exclude?(widget.class)
+ end
end
end
diff --git a/app/services/container_expiration_policies/cleanup_service.rb b/app/services/container_expiration_policies/cleanup_service.rb
index 1123b29f217..6c2b41a4daf 100644
--- a/app/services/container_expiration_policies/cleanup_service.rb
+++ b/app/services/container_expiration_policies/cleanup_service.rb
@@ -19,7 +19,6 @@ module ContainerExpirationPolicies
return ServiceResponse.error(message: 'invalid policy')
end
- repository.start_expiration_policy!
schedule_next_run_if_needed
begin
diff --git a/app/services/dependency_proxy/head_manifest_service.rb b/app/services/dependency_proxy/head_manifest_service.rb
index cd575b83a98..5bc5cb45a12 100644
--- a/app/services/dependency_proxy/head_manifest_service.rb
+++ b/app/services/dependency_proxy/head_manifest_service.rb
@@ -2,7 +2,7 @@
module DependencyProxy
class HeadManifestService < DependencyProxy::BaseService
- ACCEPT_HEADERS = ::ContainerRegistry::Client::ACCEPTED_TYPES.join(',')
+ ACCEPT_HEADERS = DependencyProxy::Manifest::ACCEPTED_TYPES.join(',')
def initialize(image, tag, token)
@image = image
diff --git a/app/services/discussions/update_diff_position_service.rb b/app/services/discussions/update_diff_position_service.rb
index 7bdf7711155..31816b46c52 100644
--- a/app/services/discussions/update_diff_position_service.rb
+++ b/app/services/discussions/update_diff_position_service.rb
@@ -25,7 +25,7 @@ module Discussions
Note.transaction do
discussion.notes.each do |note|
- Gitlab::Timeless.timeless(note, &:save)
+ note.save(touch: false)
end
if outdated && current_user
diff --git a/app/services/draft_notes/publish_service.rb b/app/services/draft_notes/publish_service.rb
index fab7a227e7d..9e1e381c568 100644
--- a/app/services/draft_notes/publish_service.rb
+++ b/app/services/draft_notes/publish_service.rb
@@ -59,7 +59,8 @@ module DraftNotes
note_params = draft.publish_params.merge(skip_keep_around_commits: skip_keep_around_commits)
note = Notes::CreateService.new(draft.project, draft.author, note_params).execute(
skip_capture_diff_note_position: skip_capture_diff_note_position,
- skip_merge_status_trigger: skip_merge_status_trigger
+ skip_merge_status_trigger: skip_merge_status_trigger,
+ skip_set_reviewed: true
)
set_discussion_resolve_status(note, draft)
diff --git a/app/services/environments/stop_service.rb b/app/services/environments/stop_service.rb
index fb14ee40c05..1b2e7ef3cf9 100644
--- a/app/services/environments/stop_service.rb
+++ b/app/services/environments/stop_service.rb
@@ -5,13 +5,27 @@ module Environments
attr_reader :ref
def execute(environment)
- return unless can?(current_user, :stop_environment, environment)
+ unless can?(current_user, :stop_environment, environment)
+ return ServiceResponse.error(
+ message: 'Unauthorized to stop the environment',
+ payload: { environment: environment }
+ )
+ end
if params[:force]
environment.stop_complete!
else
environment.stop_with_actions!(current_user)
end
+
+ unless environment.saved_change_to_attribute?(:state)
+ return ServiceResponse.error(
+          message: 'Attempted to stop the environment but failed to change the status',
+ payload: { environment: environment }
+ )
+ end
+
+ ServiceResponse.success(payload: { environment: environment })
end
def execute_for_branch(branch_name)
diff --git a/app/services/error_tracking/list_projects_service.rb b/app/services/error_tracking/list_projects_service.rb
index d52306ef805..35a8179d54d 100644
--- a/app/services/error_tracking/list_projects_service.rb
+++ b/app/services/error_tracking/list_projects_service.rb
@@ -2,8 +2,6 @@
module ErrorTracking
class ListProjectsService < ErrorTracking::BaseService
- MASKED_TOKEN_REGEX = /\A\*+\z/.freeze
-
private
def perform
@@ -46,7 +44,7 @@ module ErrorTracking
end
def masked_token?
- MASKED_TOKEN_REGEX.match?(params[:token])
+ ErrorTracking::SentryClient::Token.masked_token?(params[:token])
end
end
end
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index d848f694598..1893cfcfcff 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -12,7 +12,7 @@ class EventCreateService
DEGIGN_EVENT_LABEL = 'usage_activity_by_stage_monthly.create.action_monthly_active_users_design_management'
MR_EVENT_LABEL = 'usage_activity_by_stage_monthly.create.merge_requests_users'
- MR_EVENT_PROPERTY = 'merge_requests_users'
+ MR_EVENT_PROPERTY = 'merge_request_action'
def open_issue(issue, current_user)
create_record_event(issue, current_user, :created)
@@ -28,7 +28,7 @@ class EventCreateService
def open_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :created).tap do
- track_event(event_action: :created, event_target: MergeRequest, author_id: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :created,
project: merge_request.project,
@@ -41,7 +41,7 @@ class EventCreateService
def close_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :closed).tap do
- track_event(event_action: :closed, event_target: MergeRequest, author_id: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :closed,
project: merge_request.project,
@@ -58,7 +58,7 @@ class EventCreateService
def merge_mr(merge_request, current_user)
create_record_event(merge_request, current_user, :merged).tap do
- track_event(event_action: :merged, event_target: MergeRequest, author_id: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :merged,
project: merge_request.project,
@@ -88,7 +88,7 @@ class EventCreateService
def leave_note(note, current_user)
create_record_event(note, current_user, :commented).tap do
if note.is_a?(DiffNote) && note.for_merge_request?
- track_event(event_action: :commented, event_target: MergeRequest, author_id: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: current_user.id)
track_snowplow_event(
action: :commented,
project: note.project,
@@ -128,12 +128,17 @@ class EventCreateService
records = create.zip([:created].cycle) + update.zip([:updated].cycle)
return [] if records.empty?
- event_meta = { user: current_user, label: DEGIGN_EVENT_LABEL, property: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION }
+ event_meta = { user: current_user, label: DEGIGN_EVENT_LABEL, property: :design_action }
track_snowplow_event(action: :create, project: create.first.project, **event_meta) if create.any?
track_snowplow_event(action: :update, project: update.first.project, **event_meta) if update.any?
- create_record_events(records, current_user)
+ inserted_events = create_record_events(records, current_user)
+
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:design_action, values: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: current_user.id)
+
+ inserted_events
end
def destroy_designs(designs, current_user)
@@ -144,9 +149,15 @@ class EventCreateService
project: designs.first.project,
user: current_user,
label: DEGIGN_EVENT_LABEL,
- property: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION
+ property: :design_action
)
- create_record_events(designs.zip([:destroyed].cycle), current_user)
+
+ inserted_events = create_record_events(designs.zip([:destroyed].cycle), current_user)
+
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:design_action, values: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: current_user.id)
+
+ inserted_events
end
# Create a new wiki page event
@@ -163,7 +174,8 @@ class EventCreateService
def wiki_event(wiki_page_meta, author, action, fingerprint)
raise IllegalActionError, action unless Event::WIKI_ACTIONS.include?(action)
- track_event(event_action: action, event_target: wiki_page_meta.class, author_id: author.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:wiki_action, values: author.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: author.id)
duplicate = Event.for_wiki_meta(wiki_page_meta).for_fingerprint(fingerprint).first
return duplicate if duplicate.present?
@@ -205,13 +217,7 @@ class EventCreateService
.merge(action: action, fingerprint: fingerprint, target_id: record.id, target_type: record.class.name)
end
- result = Event.insert_all(attribute_sets, returning: %w[id])
-
- tuples.each do |record, status, _|
- track_event(event_action: status, event_target: record.class, author_id: current_user.id)
- end
-
- result
+ Event.insert_all(attribute_sets, returning: %w[id])
end
def create_push_event(service_class, project, current_user, push_data)
@@ -226,7 +232,8 @@ class EventCreateService
new_event
end
- track_event(event_action: :pushed, event_target: Project, author_id: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:project_action, values: current_user.id)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:git_write_action, values: current_user.id)
namespace = project.namespace
Gitlab::Tracking.event(
@@ -273,13 +280,7 @@ class EventCreateService
{ resource_parent_attr => resource_parent.id }
end
- def track_event(...)
- Gitlab::UsageDataCounters::TrackUniqueEvents.track_event(...)
- end
-
def track_snowplow_event(action:, project:, user:, label:, property:)
- return unless Feature.enabled?(:route_hll_to_snowplow_phase2)
-
Gitlab::Tracking.event(
self.class.to_s,
action.to_s,
diff --git a/app/services/feature_flags/base_service.rb b/app/services/feature_flags/base_service.rb
index 59db1a5f12f..028906a0b43 100644
--- a/app/services/feature_flags/base_service.rb
+++ b/app/services/feature_flags/base_service.rb
@@ -7,42 +7,24 @@ module FeatureFlags
AUDITABLE_ATTRIBUTES = %w(name description active).freeze
def success(**args)
- audit_event = args.fetch(:audit_event) { audit_event(args[:feature_flag]) }
- save_audit_event(audit_event)
sync_to_jira(args[:feature_flag])
+
+ audit_event(args[:feature_flag], args[:audit_context])
super
end
protected
- def update_last_feature_flag_updated_at!
- Operations::FeatureFlagsClient.update_last_feature_flag_updated_at!(project)
- end
-
- def audit_event(feature_flag)
- message = audit_message(feature_flag)
+ def audit_event(feature_flag, context = nil)
+ context ||= audit_context(feature_flag)
- return if message.blank?
+ return if context[:message].blank?
- details =
- {
- custom_message: message,
- target_id: feature_flag.id,
- target_type: feature_flag.class.name,
- target_details: feature_flag.name
- }
-
- ::AuditEventService.new(
- current_user,
- feature_flag.project,
- details
- )
+ ::Gitlab::Audit::Auditor.audit(context)
end
- def save_audit_event(audit_event)
- return unless audit_event
-
- audit_event.security_event
+ def update_last_feature_flag_updated_at!
+ Operations::FeatureFlagsClient.update_last_feature_flag_updated_at!(project)
end
def sync_to_jira(feature_flag)
diff --git a/app/services/feature_flags/create_service.rb b/app/services/feature_flags/create_service.rb
index 6ea40345191..2a3153e6a54 100644
--- a/app/services/feature_flags/create_service.rb
+++ b/app/services/feature_flags/create_service.rb
@@ -21,6 +21,16 @@ module FeatureFlags
private
+ def audit_context(feature_flag)
+ {
+ name: 'feature_flag_created',
+ message: audit_message(feature_flag),
+ author: current_user,
+ scope: feature_flag.project,
+ target: feature_flag
+ }
+ end
+
def audit_message(feature_flag)
message_parts = ["Created feature flag #{feature_flag.name} with description \"#{feature_flag.description}\"."]
diff --git a/app/services/feature_flags/destroy_service.rb b/app/services/feature_flags/destroy_service.rb
index 0fdc890b8a3..fdcbb802b16 100644
--- a/app/services/feature_flags/destroy_service.rb
+++ b/app/services/feature_flags/destroy_service.rb
@@ -22,6 +22,16 @@ module FeatureFlags
end
end
+ def audit_context(feature_flag)
+ {
+ name: 'feature_flag_deleted',
+ message: audit_message(feature_flag),
+ author: current_user,
+ scope: feature_flag.project,
+ target: feature_flag
+ }
+ end
+
def audit_message(feature_flag)
"Deleted feature flag #{feature_flag.name}."
end
diff --git a/app/services/feature_flags/update_service.rb b/app/services/feature_flags/update_service.rb
index a465ca1dd5f..555b5a93d23 100644
--- a/app/services/feature_flags/update_service.rb
+++ b/app/services/feature_flags/update_service.rb
@@ -25,13 +25,13 @@ module FeatureFlags
end
end
- # We generate the audit event before the feature flag is saved as #changed_strategies_messages depends on the strategies' states before save
- audit_event = audit_event(feature_flag)
+      # We generate the audit context before the feature flag is saved, as #changed_strategies_messages depends on the strategies' states before the save
+      saved_audit_context = audit_context(feature_flag)
if feature_flag.save
update_last_feature_flag_updated_at!
- success(feature_flag: feature_flag, audit_event: audit_event)
+ success(feature_flag: feature_flag, audit_context: saved_audit_context)
else
error(feature_flag.errors.full_messages, :bad_request)
end
@@ -50,6 +50,16 @@ module FeatureFlags
end
end
+ def audit_context(feature_flag)
+ {
+ name: 'feature_flag_updated',
+ message: audit_message(feature_flag),
+ author: current_user,
+ scope: feature_flag.project,
+ target: feature_flag
+ }
+ end
+
def audit_message(feature_flag)
changes = changed_attributes_messages(feature_flag)
changes += changed_strategies_messages(feature_flag)
diff --git a/app/services/files/base_service.rb b/app/services/files/base_service.rb
index 8f722de2019..613785d01cc 100644
--- a/app/services/files/base_service.rb
+++ b/app/services/files/base_service.rb
@@ -26,16 +26,23 @@ module Files
def file_has_changed?(path, commit_id)
return false unless commit_id
- last_commit = Gitlab::Git::Commit
- .last_for_path(@start_project.repository, @start_branch, path, literal_pathspec: true)
+ last_commit_from_branch = get_last_commit_for_path(ref: @start_branch, path: path)
- return false unless last_commit
+ return false unless last_commit_from_branch
- last_commit.sha != commit_id
+ last_commit_from_commit_id = get_last_commit_for_path(ref: commit_id, path: path)
+
+ return false unless last_commit_from_commit_id
+
+ last_commit_from_branch.sha != last_commit_from_commit_id.sha
end
private
+ def get_last_commit_for_path(ref:, path:)
+ Gitlab::Git::Commit.last_for_path(@start_project.repository, ref, path, literal_pathspec: true)
+ end
+
def commit_email(git_user)
return params[:author_email] if params[:author_email].present?
return unless current_user
diff --git a/app/services/git/base_hooks_service.rb b/app/services/git/base_hooks_service.rb
index 7158116fde1..acf54dec51b 100644
--- a/app/services/git/base_hooks_service.rb
+++ b/app/services/git/base_hooks_service.rb
@@ -15,6 +15,7 @@ module Git
# Not a hook, but it needs access to the list of changed commits
enqueue_invalidate_cache
+ enqueue_notify_kas
success
end
@@ -77,6 +78,13 @@ module Git
ProjectCacheWorker.perform_async(project.id, file_types, [], false)
end
+ def enqueue_notify_kas
+ return unless Gitlab::Kas.enabled?
+ return unless Feature.enabled?(:notify_kas_on_git_push, project)
+
+ Clusters::Agents::NotifyGitPushWorker.perform_async(project.id)
+ end
+
def pipeline_params
strong_memoize(:pipeline_params) do
{
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index 6087efce9fd..2ead2e2a113 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -156,7 +156,7 @@ module Git
def enqueue_jira_connect_sync_messages
return unless project.jira_subscription_exists?
- branch_to_sync = branch_name if Atlassian::JiraIssueKeyExtractor.has_keys?(branch_name)
+ branch_to_sync = branch_name if Atlassian::JiraIssueKeyExtractors::Branch.has_keys?(project, branch_name)
commits_to_sync = limited_commits.select { |commit| Atlassian::JiraIssueKeyExtractor.has_keys?(commit.safe_message) }.map(&:sha)
if branch_to_sync || commits_to_sync.any?
diff --git a/app/services/google_cloud/generate_pipeline_service.rb b/app/services/google_cloud/generate_pipeline_service.rb
index b6438d6f501..791be69f4d4 100644
--- a/app/services/google_cloud/generate_pipeline_service.rb
+++ b/app/services/google_cloud/generate_pipeline_service.rb
@@ -61,7 +61,7 @@ module GoogleCloud
end
def pipeline_content(include_path)
- gitlab_ci_yml = Gitlab::Config::Loader::Yaml.new(default_branch_gitlab_ci_yml || '{}').load!
+ gitlab_ci_yml = ::Gitlab::Ci::Config::Yaml.load!(default_branch_gitlab_ci_yml || '{}')
append_remote_include(gitlab_ci_yml, "https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/library/-/raw/main/#{include_path}")
end
diff --git a/app/services/groups/autocomplete_service.rb b/app/services/groups/autocomplete_service.rb
index 92b05d9ac08..5b9d60495e9 100644
--- a/app/services/groups/autocomplete_service.rb
+++ b/app/services/groups/autocomplete_service.rb
@@ -13,7 +13,7 @@ module Groups
IssuesFinder.new(current_user, finder_params)
.execute
.preload(project: :namespace)
- .select(:iid, :title, :project_id)
+ .select(:iid, :title, :project_id, :namespace_id)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/groups/group_links/create_service.rb b/app/services/groups/group_links/create_service.rb
index 9c1a003ff36..a6e2c0b952e 100644
--- a/app/services/groups/group_links/create_service.rb
+++ b/app/services/groups/group_links/create_service.rb
@@ -36,3 +36,5 @@ module Groups
end
end
end
+
+Groups::GroupLinks::CreateService.prepend_mod
diff --git a/app/services/groups/group_links/destroy_service.rb b/app/services/groups/group_links/destroy_service.rb
index dc3cab927be..8eed46b28ca 100644
--- a/app/services/groups/group_links/destroy_service.rb
+++ b/app/services/groups/group_links/destroy_service.rb
@@ -24,7 +24,11 @@ module Groups
Gitlab::AppLogger.info(
"Failed to delete GroupGroupLinks with ids: #{links.map(&:id)}.")
end
+
+ links
end
end
end
end
+
+Groups::GroupLinks::DestroyService.prepend_mod
diff --git a/app/services/groups/group_links/update_service.rb b/app/services/groups/group_links/update_service.rb
index 66d0d63cb9b..913bf2bfce7 100644
--- a/app/services/groups/group_links/update_service.rb
+++ b/app/services/groups/group_links/update_service.rb
@@ -15,6 +15,8 @@ module Groups
if requires_authorization_refresh?(group_link_params)
group_link.shared_with_group.refresh_members_authorized_projects(direct_members_only: true)
end
+
+ group_link
end
private
@@ -27,3 +29,5 @@ module Groups
end
end
end
+
+Groups::GroupLinks::UpdateService.prepend_mod
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index 7e9fd9dad54..1c8df157716 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -60,7 +60,7 @@ module Groups
raise_transfer_error(:namespace_with_same_path) if namespace_with_same_path?
raise_transfer_error(:group_contains_images) if group_projects_contain_registry_images?
raise_transfer_error(:cannot_transfer_to_subgroup) if transfer_to_subgroup?
- raise_transfer_error(:group_contains_npm_packages) if group_with_npm_packages?
+ raise_transfer_error(:group_contains_namespaced_npm_packages) if group_with_namespaced_npm_packages?
raise_transfer_error(:no_permissions_to_migrate_crm) if no_permissions_to_migrate_crm?
end
@@ -74,10 +74,11 @@ module Groups
false
end
- def group_with_npm_packages?
+ def group_with_namespaced_npm_packages?
return false unless group.packages_feature_enabled?
- npm_packages = ::Packages::GroupPackagesFinder.new(current_user, group, package_type: :npm).execute
+ npm_packages = ::Packages::GroupPackagesFinder.new(current_user, group, package_type: :npm, preload_pipelines: false).execute
+ npm_packages = npm_packages.with_npm_scope(group.root_ancestor.path)
different_root_ancestor? && npm_packages.exists?
end
@@ -219,7 +220,7 @@ module Groups
invalid_policies: s_("TransferGroup|You don't have enough permissions."),
group_contains_images: s_('TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.'),
cannot_transfer_to_subgroup: s_('TransferGroup|Cannot transfer group to one of its subgroup.'),
- group_contains_npm_packages: s_('TransferGroup|Group contains projects with NPM packages.'),
+ group_contains_namespaced_npm_packages: s_('TransferGroup|Group contains projects with NPM packages scoped to the current root level group.'),
no_permissions_to_migrate_crm: s_("TransferGroup|Group contains contacts/organizations and you don't have enough permissions to move them to the new root group.")
}.freeze
end
diff --git a/app/services/import/base_service.rb b/app/services/import/base_service.rb
index 6b5adcbc39e..64cf3cfa04a 100644
--- a/app/services/import/base_service.rb
+++ b/app/services/import/base_service.rb
@@ -9,7 +9,7 @@ module Import
end
def authorized?
- can?(current_user, :create_projects, target_namespace)
+ can?(current_user, :import_projects, target_namespace)
end
private
diff --git a/app/services/import/bitbucket_server_service.rb b/app/services/import/bitbucket_server_service.rb
index f7f17f1e53e..5d496dc7cc3 100644
--- a/app/services/import/bitbucket_server_service.rb
+++ b/app/services/import/bitbucket_server_service.rb
@@ -10,7 +10,7 @@ module Import
end
unless authorized?
- return log_and_return_error("You don't have permissions to create this project", :unauthorized)
+ return log_and_return_error("You don't have permissions to import this project", :unauthorized)
end
unless repo
diff --git a/app/services/import/fogbugz_service.rb b/app/services/import/fogbugz_service.rb
index d1003823456..9a8def43312 100644
--- a/app/services/import/fogbugz_service.rb
+++ b/app/services/import/fogbugz_service.rb
@@ -13,8 +13,8 @@ module Import
unless authorized?
return log_and_return_error(
- "You don't have permissions to create this project",
- _("You don't have permissions to create this project"),
+ "You don't have permissions to import this project",
+ _("You don't have permissions to import this project"),
:unauthorized
)
end
diff --git a/app/services/import/github/cancel_project_import_service.rb b/app/services/import/github/cancel_project_import_service.rb
index 5dce5e73662..62cd0c95eaf 100644
--- a/app/services/import/github/cancel_project_import_service.rb
+++ b/app/services/import/github/cancel_project_import_service.rb
@@ -9,6 +9,8 @@ module Import
if project.import_in_progress?
project.import_state.cancel
+ metrics.track_canceled_import
+
success(project: project)
else
error(cannot_cancel_error_message, :bad_request)
@@ -31,6 +33,10 @@ module Import
project_status: project.import_state.status
)
end
+
+ def metrics
+ @metrics ||= Gitlab::Import::Metrics.new(:github_importer, project)
+ end
end
end
end
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index b30c344723d..7e7f7ea9810 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -103,7 +103,7 @@ module Import
elsif target_namespace.nil?
error(_('Namespace or group to import repository into does not exist.'), :unprocessable_entity)
elsif !authorized?
- error(_('This namespace has already been taken. Choose a different one.'), :unprocessable_entity)
+ error(_('You are not allowed to import projects in this namespace.'), :unprocessable_entity)
elsif oversized?
error(oversize_error_message, :unprocessable_entity)
end
diff --git a/app/services/import/validate_remote_git_endpoint_service.rb b/app/services/import/validate_remote_git_endpoint_service.rb
index 1b8fa45e979..2886bd5c9b7 100644
--- a/app/services/import/validate_remote_git_endpoint_service.rb
+++ b/app/services/import/validate_remote_git_endpoint_service.rb
@@ -21,7 +21,9 @@ module Import
def execute
uri = Gitlab::Utils.parse_url(@params[:url])
- return ServiceResponse.error(message: "#{@params[:url]} is not a valid URL") unless uri
+ if !uri || !uri.hostname || Project::VALID_IMPORT_PROTOCOLS.exclude?(uri.scheme)
+ return ServiceResponse.error(message: "#{@params[:url]} is not a valid URL")
+ end
return ServiceResponse.success if uri.scheme == 'git'
diff --git a/app/services/import_csv/base_service.rb b/app/services/import_csv/base_service.rb
index feb76425fb4..70834b8a85a 100644
--- a/app/services/import_csv/base_service.rb
+++ b/app/services/import_csv/base_service.rb
@@ -2,6 +2,8 @@
module ImportCsv
class BaseService
+ include Gitlab::Utils::StrongMemoize
+
def initialize(user, project, csv_io)
@user = user
@project = project
@@ -9,6 +11,8 @@ module ImportCsv
@results = { success: 0, error_lines: [], parse_error: false }
end
+ PreprocessError = Class.new(StandardError)
+
def execute
process_csv
email_results_to_user
@@ -36,7 +40,23 @@ module ImportCsv
raise NotImplementedError
end
+ def validate_structure!
+ header_line = csv_data.lines.first
+ raise CSV::MalformedCSVError.new('File is empty, no headers found', 1) if header_line.blank?
+
+ validate_headers_presence!(header_line)
+ detect_col_sep
+ end
+
+ def preprocess!
+ # any logic can be added in subclasses if needed
+ # hence just a no-op rather than NotImplementedError
+ end
+
def process_csv
+ validate_structure!
+ preprocess!
+
with_csv_lines.each do |row, line_no|
attributes = attributes_for(row)
@@ -46,23 +66,30 @@ module ImportCsv
results[:error_lines].push(line_no)
end
end
- rescue ArgumentError, CSV::MalformedCSVError
+ rescue ArgumentError, CSV::MalformedCSVError => e
results[:parse_error] = true
+ results[:error_lines].push(e.line_number) if e.respond_to?(:line_number)
+ rescue PreprocessError
+ results[:parse_error] = false
end
def with_csv_lines
- csv_data = @csv_io.open(&:read).force_encoding(Encoding::UTF_8)
- validate_headers_presence!(csv_data.lines.first)
-
CSV.new(
csv_data,
- col_sep: detect_col_sep(csv_data.lines.first),
+ col_sep: detect_col_sep,
headers: true,
header_converters: :symbol
).each.with_index(2)
end
- def detect_col_sep(header)
+ def csv_data
+ @csv_io.open(&:read).force_encoding(Encoding::UTF_8)
+ end
+ strong_memoize_attr :csv_data
+
+ def detect_col_sep
+ header = csv_data.lines.first
+
if header.include?(",")
","
elsif header.include?(";")
@@ -73,6 +100,7 @@ module ImportCsv
raise CSV::MalformedCSVError.new('Invalid CSV format', 1)
end
end
+ strong_memoize_attr :detect_col_sep
def create_object(attributes)
# NOTE: CSV imports are performed by workers, so we do not have a request context in order
diff --git a/app/services/incident_management/timeline_events/base_service.rb b/app/services/incident_management/timeline_events/base_service.rb
index e997d940ed4..75a3811af2d 100644
--- a/app/services/incident_management/timeline_events/base_service.rb
+++ b/app/services/incident_management/timeline_events/base_service.rb
@@ -29,8 +29,6 @@ module IncidentManagement
namespace = project.namespace
track_usage_event(event, user.id)
- return unless Feature.enabled?(:route_hll_to_snowplow_phase2, namespace)
-
Gitlab::Tracking.event(
self.class.to_s,
event,
diff --git a/app/services/integrations/slack_event_service.rb b/app/services/integrations/slack_event_service.rb
new file mode 100644
index 00000000000..65f3c226e34
--- /dev/null
+++ b/app/services/integrations/slack_event_service.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+# Performs the initial handling of event payloads sent from Slack to GitLab.
+# See `API::Integrations::Slack::Events` which calls this service.
+module Integrations
+ class SlackEventService
+ URL_VERIFICATION_EVENT = 'url_verification'
+
+ UnknownEventError = Class.new(StandardError)
+
+ def initialize(params)
+ # When receiving URL verification events, params[:type] is 'url_verification'.
+ # For all other events we subscribe to, params[:type] is 'event_callback' and
+ # the specific type of the event will be in params[:event][:type].
+ # Remove both of these from the params before they are passed to the services.
+ type = params.delete(:type)
+ type = params[:event].delete(:type) if type == 'event_callback'
+
+ @slack_event = type
+ @params = params
+ end
+
+ def execute
+ raise UnknownEventError, "Unable to handle event type: '#{slack_event}'" unless routable_event?
+
+ payload = route_event
+
+ ServiceResponse.success(payload: payload)
+ end
+
+ private
+
+ # The `url_verification` slack_event response must be returned to Slack in-request,
+ # so for this event we call the service directly instead of through a worker.
+ #
+ # All other events must be handled asynchronously in order to return a 2xx response
+ # immediately to Slack in the request. See https://api.slack.com/apis/connections/events-api.
+ def route_in_request?
+ slack_event == URL_VERIFICATION_EVENT
+ end
+
+ def routable_event?
+ route_in_request? || route_to_event_worker?
+ end
+
+ def route_to_event_worker?
+ SlackEventWorker.event?(slack_event)
+ end
+
+ # Returns a payload for the service response.
+ def route_event
+ return SlackEvents::UrlVerificationService.new(params).execute if route_in_request?
+
+ SlackEventWorker.perform_async(slack_event: slack_event, params: params)
+
+ {}
+ end
+
+ attr_reader :slack_event, :params
+ end
+end
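
A rough sketch of the two payload shapes this service distinguishes (values are illustrative; real params arrive via `API::Integrations::Slack::Events`):

    # URL verification handshake: handled synchronously, in-request.
    Integrations::SlackEventService
      .new({ type: 'url_verification', challenge: 'abc123' })
      .execute
    # => ServiceResponse.success with payload { challenge: 'abc123' }

    # Subscribed event: the outer type is 'event_callback'; the concrete event type
    # sits under params[:event][:type] and is routed to SlackEventWorker asynchronously.
    Integrations::SlackEventService
      .new({ type: 'event_callback', team_id: 'T123', event: { type: 'app_home_opened', user: 'U123' } })
      .execute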
diff --git a/app/services/integrations/slack_events/app_home_opened_service.rb b/app/services/integrations/slack_events/app_home_opened_service.rb
new file mode 100644
index 00000000000..48dda324270
--- /dev/null
+++ b/app/services/integrations/slack_events/app_home_opened_service.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+# Handles the Slack `app_home_opened` event sent from Slack to GitLab.
+# Responds with a POST to the Slack API 'views.publish' method.
+#
+# See:
+# - https://api.slack.com/methods/views.publish
+# - https://api.slack.com/events/app_home_opened
+module Integrations
+ module SlackEvents
+ class AppHomeOpenedService
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(params)
+ @slack_user_id = params.dig(:event, :user)
+ @slack_workspace_id = params[:team_id]
+ end
+
+ def execute
+ # Legacy Slack App integrations will not yet have a token we can use
+ # to call the Slack API. Do nothing, and consider the service successful.
+ unless slack_installation
+ logger.info(
+ slack_user_id: slack_user_id,
+ slack_workspace_id: slack_workspace_id,
+ message: 'SlackInstallation record has no bot token'
+ )
+
+ return ServiceResponse.success
+ end
+
+ begin
+ response = ::Slack::API.new(slack_installation).post(
+ 'views.publish',
+ payload
+ )
+ rescue *Gitlab::HTTP::HTTP_ERRORS => e
+ return ServiceResponse
+ .error(message: 'HTTP exception when calling Slack API')
+ .track_exception(
+ as: e.class,
+ slack_user_id: slack_user_id,
+ slack_workspace_id: slack_workspace_id
+ )
+ end
+
+ return ServiceResponse.success if response['ok']
+
+ # For a list of errors, see:
+ # https://api.slack.com/methods/views.publish#errors
+ ServiceResponse.error(
+ message: 'Slack API returned an error',
+ payload: response
+ ).track_exception(
+ slack_user_id: slack_user_id,
+ slack_workspace_id: slack_workspace_id,
+ response: response.to_h
+ )
+ end
+
+ private
+
+ def slack_installation
+ SlackIntegration.with_bot.find_by_team_id(slack_workspace_id)
+ end
+ strong_memoize_attr :slack_installation
+
+ def slack_gitlab_user_connection
+ ChatNames::FindUserService.new(slack_workspace_id, slack_user_id).execute
+ end
+ strong_memoize_attr :slack_gitlab_user_connection
+
+ def payload
+ {
+ user_id: slack_user_id,
+ view: ::Slack::BlockKit::AppHomeOpened.new(
+ slack_user_id,
+ slack_workspace_id,
+ slack_gitlab_user_connection,
+ slack_installation
+ ).build
+ }
+ end
+
+ def logger
+ Gitlab::IntegrationsLogger
+ end
+
+ attr_reader :slack_user_id, :slack_workspace_id
+ end
+ end
+end
diff --git a/app/services/integrations/slack_events/url_verification_service.rb b/app/services/integrations/slack_events/url_verification_service.rb
new file mode 100644
index 00000000000..dbe2ffc77f8
--- /dev/null
+++ b/app/services/integrations/slack_events/url_verification_service.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# Returns the special URL verification response expected by Slack when the
+# GitLab Slack app is first configured to receive Slack events.
+#
+# Slack will issue the challenge request to the endpoint that receives events
+# and expect it to respond with the same `challenge` param back.
+#
+# See https://api.slack.com/apis/connections/events-api.
+module Integrations
+ module SlackEvents
+ class UrlVerificationService
+ def initialize(params)
+ @challenge = params[:challenge]
+ end
+
+ def execute
+ { challenge: challenge }
+ end
+
+ private
+
+ attr_reader :challenge
+ end
+ end
+end
diff --git a/app/services/integrations/slack_interaction_service.rb b/app/services/integrations/slack_interaction_service.rb
new file mode 100644
index 00000000000..30e1a396f0d
--- /dev/null
+++ b/app/services/integrations/slack_interaction_service.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Integrations
+ class SlackInteractionService
+ UnknownInteractionError = Class.new(StandardError)
+
+ INTERACTIONS = {
+ 'view_closed' => SlackInteractions::IncidentManagement::IncidentModalClosedService,
+ 'view_submission' => SlackInteractions::IncidentManagement::IncidentModalSubmitService,
+ 'block_actions' => SlackInteractions::BlockActionService
+ }.freeze
+
+ def initialize(params)
+ @interaction_type = params.delete(:type)
+ @params = params
+ end
+
+ def execute
+ raise UnknownInteractionError, "Unable to handle interaction type: '#{interaction_type}'" \
+ unless interaction?(interaction_type)
+
+ service_class = INTERACTIONS[interaction_type]
+ service_class.new(params).execute
+
+ ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :interaction_type, :params
+
+ def interaction?(type)
+ INTERACTIONS.key?(type)
+ end
+ end
+end
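
A sketch of the dispatch table in use (payloads are illustrative; real ones come from Slack interactivity callbacks):

    # 'view_closed', 'view_submission' and 'block_actions' map to their handler
    # services; any other interaction type raises.
    Integrations::SlackInteractionService
      .new({ type: 'view_closed', view: { private_metadata: 'https://hooks.slack.com/example' } })
      .execute
    # => ServiceResponse.success

    Integrations::SlackInteractionService.new({ type: 'shortcut' }).execute
    # raises Integrations::SlackInteractionService::UnknownInteractionError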
diff --git a/app/services/integrations/slack_interactions/block_action_service.rb b/app/services/integrations/slack_interactions/block_action_service.rb
new file mode 100644
index 00000000000..d135635fda4
--- /dev/null
+++ b/app/services/integrations/slack_interactions/block_action_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Integrations
+ module SlackInteractions
+ class BlockActionService
+ ALLOWED_UPDATES_HANDLERS = {
+ 'incident_management_project' => SlackInteractions::SlackBlockActions::IncidentManagement::ProjectUpdateHandler
+ }.freeze
+
+ def initialize(params)
+ @params = params
+ end
+
+ def execute
+ actions.each do |action|
+ action_id = action[:action_id]
+
+ action_handler_class = ALLOWED_UPDATES_HANDLERS[action_id]
+ action_handler_class.new(params, action).execute
+ end
+ end
+
+ private
+
+ def actions
+ params[:actions].select { |action| ALLOWED_UPDATES_HANDLERS[action[:action_id]] }
+ end
+
+ attr_accessor :params
+ end
+ end
+end
diff --git a/app/services/integrations/slack_interactions/incident_management/incident_modal_closed_service.rb b/app/services/integrations/slack_interactions/incident_management/incident_modal_closed_service.rb
new file mode 100644
index 00000000000..9daa5d76df7
--- /dev/null
+++ b/app/services/integrations/slack_interactions/incident_management/incident_modal_closed_service.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Integrations
+ module SlackInteractions
+ module IncidentManagement
+ class IncidentModalClosedService
+ def initialize(params)
+ @params = params
+ end
+
+ def execute
+ begin
+ response = close_modal
+ rescue *Gitlab::HTTP::HTTP_ERRORS => e
+ return ServiceResponse
+ .error(message: 'HTTP exception when calling Slack API')
+ .track_exception(
+ params: params,
+ as: e.class
+ )
+ end
+
+ return ServiceResponse.success if response['ok']
+
+ ServiceResponse.error(
+ message: _('Something went wrong while closing the incident form.'),
+ payload: response
+ ).track_exception(
+ response: response.to_h,
+ params: params
+ )
+ end
+
+ private
+
+ attr_accessor :params
+
+ def close_modal
+ request_body = Gitlab::Json.dump(close_request_body)
+ response_url = params.dig(:view, :private_metadata)
+
+ Gitlab::HTTP.post(response_url, body: request_body, headers: headers)
+ end
+
+ def close_request_body
+ {
+ replace_original: 'true',
+ text: _('Incident creation cancelled.')
+ }
+ end
+
+ def headers
+ { 'Content-Type' => 'application/json' }
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/integrations/slack_interactions/incident_management/incident_modal_opened_service.rb b/app/services/integrations/slack_interactions/incident_management/incident_modal_opened_service.rb
new file mode 100644
index 00000000000..b7940a5126e
--- /dev/null
+++ b/app/services/integrations/slack_interactions/incident_management/incident_modal_opened_service.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+module Integrations
+ module SlackInteractions
+ module IncidentManagement
+ class IncidentModalOpenedService
+ MAX_PROJECTS = 100
+ CACHE_EXPIRES_IN = 5.minutes
+
+ def initialize(slack_installation, current_user, params)
+ @slack_installation = slack_installation
+ @current_user = current_user
+ @team_id = params[:team_id]
+ @response_url = params[:response_url]
+ @trigger_id = params[:trigger_id]
+ end
+
+ def execute
+ if user_projects.empty?
+ return ServiceResponse.error(message: _('You do not have access to any projects for creating incidents.'))
+ end
+
+ post_modal
+ end
+
+ def self.cache_write(view_id, project_id)
+ Rails.cache.write(cache_build_key(view_id), project_id, expires_in: CACHE_EXPIRES_IN)
+ end
+
+ def self.cache_read(view_id)
+ Rails.cache.read(cache_build_key(view_id))
+ end
+
+ private
+
+ attr_reader :slack_installation, :current_user, :team_id, :response_url, :trigger_id
+
+ def self.cache_build_key(view_id)
+ "slack:incident_modal_opened:#{view_id}"
+ end
+
+ def user_projects
+ current_user.projects_where_can_admin_issues.limit(MAX_PROJECTS)
+ end
+
+ def post_modal
+ begin
+ response = ::Slack::API.new(slack_installation).post(
+ 'views.open',
+ modal_view
+ )
+ rescue *Gitlab::HTTP::HTTP_ERRORS => e
+ return ServiceResponse
+ .error(message: 'HTTP exception when calling Slack API')
+ .track_exception(
+ as: e.class,
+ slack_workspace_id: team_id
+ )
+ end
+
+ if response['ok']
+ self.class.cache_write(view_id(response), project_id(response))
+
+ return ServiceResponse.success(message: _('Please complete the incident creation form.'))
+ end
+
+ ServiceResponse.error(
+ message: _('Something went wrong while opening the incident form.'),
+ payload: response
+ ).track_exception(
+ response: response.to_h,
+ slack_workspace_id: team_id,
+ slack_user_id: slack_installation.user_id
+ )
+ end
+
+ def modal_view
+ {
+ trigger_id: trigger_id,
+ view: modal_payload
+ }
+ end
+
+ def modal_payload
+ ::Slack::BlockKit::IncidentManagement::IncidentModalOpened.new(
+ user_projects,
+ response_url
+ ).build
+ end
+
+ def project_id(response)
+ response.dig(
+ 'view', 'state', 'values',
+ 'project_and_severity_selector',
+ 'incident_management_project', 'selected_option',
+ 'value')
+ end
+
+ def view_id(response)
+ response.dig('view', 'id')
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/integrations/slack_interactions/incident_management/incident_modal_submit_service.rb b/app/services/integrations/slack_interactions/incident_management/incident_modal_submit_service.rb
new file mode 100644
index 00000000000..34af03640d3
--- /dev/null
+++ b/app/services/integrations/slack_interactions/incident_management/incident_modal_submit_service.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+module Integrations
+ module SlackInteractions
+ module IncidentManagement
+ class IncidentModalSubmitService
+ include GitlabRoutingHelper
+ include Gitlab::Routing
+
+ IssueCreateError = Class.new(StandardError)
+
+ def initialize(params)
+ @params = params
+ @values = params.dig(:view, :state, :values)
+ @team_id = params.dig(:team, :id)
+ @user_id = params.dig(:user, :id)
+ @additional_message = ''
+ end
+
+ def execute
+ create_response = Issues::CreateService.new(
+ container: project,
+ current_user: find_user.user,
+ params: incident_params,
+ spam_params: nil
+ ).execute
+
+ raise IssueCreateError, create_response.errors.to_sentence if create_response.error?
+
+ incident = create_response.payload[:issue]
+ incident_link = incident_link_text(incident)
+ response = send_to_slack(incident_link)
+
+ return ServiceResponse.success(payload: { incident: incident }) if response['ok']
+
+ ServiceResponse.error(
+ message: _('Something went wrong when sending the incident link to Slack.'),
+ payload: response
+ ).track_exception(
+ response: response.to_h,
+ slack_workspace_id: team_id,
+ slack_user_id: user_id
+ )
+ rescue StandardError => e
+ send_to_slack(_('There was a problem creating the incident. Please try again.'))
+
+ ServiceResponse
+ .error(
+ message: e.message
+ ).track_exception(
+ slack_workspace_id: team_id,
+ slack_user_id: user_id,
+ as: e.class
+ )
+ end
+
+ private
+
+ attr_accessor :params, :values, :team_id, :user_id, :additional_message
+
+ def incident_params
+ {
+ title: values.dig(:title_input, :title, :value),
+ severity: severity,
+ confidential: confidential?,
+ description: description,
+ escalation_status: { status: status },
+ issue_type: "incident",
+ assignee_ids: [assignee],
+ label_ids: labels
+ }
+ end
+
+ def strip_markup(string)
+ SlackMarkdownSanitizer.sanitize(string)
+ end
+
+ def send_to_slack(text)
+ response_url = params.dig(:view, :private_metadata)
+
+ body = {
+ replace_original: 'true',
+ text: text
+ }
+
+ Gitlab::HTTP.post(
+ response_url,
+ body: Gitlab::Json.dump(body),
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ def incident_link_text(incident)
+ "#{_('New incident has been created')}: " \
+ "<#{issue_url(incident)}|#{incident.to_reference} " \
+ "- #{strip_markup(incident.title)}>. #{@additional_message}"
+ end
+
+ def project
+ project_id = values.dig(
+ :project_and_severity_selector,
+ :incident_management_project,
+ :selected_option,
+ :value)
+
+ Project.find(project_id)
+ end
+
+ def find_user
+ ChatNames::FindUserService.new(team_id, user_id).execute
+ end
+
+ def description
+ description =
+ values.dig(:incident_description, :description, :value) ||
+ values.dig(project.id.to_s.to_sym, :description, :value)
+
+ zoom_link = values.dig(:zoom, :link, :value)
+
+ return description if zoom_link.blank?
+
+ "#{description} \n/zoom #{zoom_link}"
+ end
+
+ def confidential?
+ values.dig(:confidentiality, :confidential, :selected_options).present?
+ end
+
+ def severity
+ values.dig(:project_and_severity_selector, :severity, :selected_option, :value) || 'unknown'
+ end
+
+ def status
+ values.dig(:status_and_assignee_selector, :status, :selected_option, :value)
+ end
+
+ def assignee
+ assignee_id = values.dig(:status_and_assignee_selector, :assignee, :selected_option, :value)
+
+ return unless assignee_id
+
+ user = User.find_by_id(assignee_id)
+ member = project.member(user)
+
+ unless member
+ @additional_message =
+ "However, " \
+ "#{user.name} was not assigned to the incident as they are not a member in #{project.name}."
+
+ return
+ end
+
+ member.user_id
+ end
+
+ def labels
+ values.dig(:label_selector, :labels, :selected_options)&.pluck(:value)
+ end
+ end
+ end
+ end
+end
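
For orientation, the submit service above reads everything it needs from the Slack view payload. A minimal, illustrative invocation is sketched below; the block IDs mirror the dig calls in the class, but every value (team, user, view, project ID, URL) is a placeholder and not data from this change.

# Illustrative payload only: keys follow the dig calls above, values are made up.
params = {
  team: { id: 'T_PLACEHOLDER' },
  user: { id: 'U_PLACEHOLDER' },
  view: {
    private_metadata: 'https://example.invalid/slack-response-url', # used by send_to_slack
    state: {
      values: {
        title_input: { title: { value: 'Example incident title' } },
        incident_description: { description: { value: 'Example description' } },
        project_and_severity_selector: {
          incident_management_project: { selected_option: { value: '1' } },
          severity: { selected_option: { value: 'high' } }
        },
        status_and_assignee_selector: {
          status: { selected_option: { value: 'triggered' } }
        }
      }
    }
  }
}

result = Integrations::SlackInteractions::IncidentManagement::IncidentModalSubmitService.new(params).execute
result.success? # => true when the incident was created and the Slack message was posted
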
diff --git a/app/services/integrations/slack_interactions/slack_block_actions/incident_management/project_update_handler.rb b/app/services/integrations/slack_interactions/slack_block_actions/incident_management/project_update_handler.rb
new file mode 100644
index 00000000000..5f24c8ec4f5
--- /dev/null
+++ b/app/services/integrations/slack_interactions/slack_block_actions/incident_management/project_update_handler.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+module Integrations
+ module SlackInteractions
+ module SlackBlockActions
+ module IncidentManagement
+ class ProjectUpdateHandler
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(params, action)
+ @view = params[:view]
+ @action = action
+ @team_id = params.dig(:view, :team_id)
+ @user_id = params.dig(:user, :id)
+ end
+
+ def execute
+ return if project_unchanged?
+ return unless allowed?
+
+ post_updated_modal
+ end
+
+ private
+
+ def allowed?
+ return false unless current_user
+
+ current_user.can?(:read_project, old_project) &&
+ current_user.can?(:read_project, new_project)
+ end
+
+ def current_user
+ ChatNames::FindUserService.new(team_id, user_id).execute&.user
+ end
+ strong_memoize_attr :current_user
+
+ def slack_installation
+ SlackIntegration.with_bot.find_by_team_id(team_id)
+ end
+ strong_memoize_attr :slack_installation
+
+ def post_updated_modal
+ modal = update_modal
+
+ begin
+ response = ::Slack::API.new(slack_installation).post(
+ 'views.update',
+ {
+ view_id: view[:id],
+ view: modal
+ }
+ )
+ rescue *::Gitlab::HTTP::HTTP_ERRORS => e
+ return ServiceResponse
+ .error(message: 'HTTP exception when calling Slack API')
+ .track_exception(
+ as: e.class,
+ slack_workspace_id: view[:team_id]
+ )
+ end
+
+ return ServiceResponse.success(message: _('Modal updated')) if response['ok']
+
+ ServiceResponse.error(
+ message: _('Something went wrong while updating the modal.'),
+ payload: response
+ ).track_exception(
+ response: response.to_h,
+ slack_workspace_id: view[:team_id],
+ slack_user_id: slack_installation.user_id
+ )
+ end
+
+ def update_modal
+ updated_view = update_incident_template
+ cleanup(updated_view)
+ end
+
+ def update_incident_template
+ updated_view = view.dup
+
+ incident_description_blocks = updated_view[:blocks].select do |block|
+ block[:block_id] == 'incident_description' || block[:block_id] == old_project.id.to_s
+ end
+
+ incident_description_blocks.first[:element][:initial_value] = read_template_content
+ incident_description_blocks.first[:block_id] = new_project.id.to_s
+
+ Integrations::SlackInteractions::IncidentManagement::IncidentModalOpenedService
+ .cache_write(view[:id], new_project.id.to_s)
+
+ updated_view
+ end
+
+ def new_project
+ Project.find(action.dig(:selected_option, :value))
+ end
+ strong_memoize_attr :new_project
+
+ def old_project
+ old_project_id = Integrations::SlackInteractions::IncidentManagement::IncidentModalOpenedService
+ .cache_read(view[:id])
+
+ Project.find(old_project_id) if old_project_id
+ end
+ strong_memoize_attr :old_project
+
+ def project_unchanged?
+ old_project == new_project
+ end
+
+ def read_template_content
+ new_project.incident_management_setting&.issue_template_content.to_s
+ end
+
+ def cleanup(view)
+ view.except!(
+ :id, :team_id, :state,
+ :hash, :previous_view_id,
+ :root_view_id, :app_id,
+ :app_installed_team_id,
+ :bot_id)
+ end
+
+ attr_accessor :view, :action, :team_id, :user_id
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/integrations/slack_option_service.rb b/app/services/integrations/slack_option_service.rb
new file mode 100644
index 00000000000..a659f8b0634
--- /dev/null
+++ b/app/services/integrations/slack_option_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Integrations
+ class SlackOptionService
+ UnknownOptionError = Class.new(StandardError)
+
+ OPTIONS = {
+ 'assignee' => SlackOptions::UserSearchHandler,
+ 'labels' => SlackOptions::LabelSearchHandler
+ }.freeze
+
+ def initialize(params)
+ @params = params
+ @search_type = params.delete(:action_id)
+ @selected_value = params.delete(:value)
+ @view_id = params.dig(:view, :id)
+ end
+
+ def execute
+ raise UnknownOptionError, "Unable to handle option: '#{search_type}'" \
+ unless option?(search_type)
+
+ handler_class = OPTIONS[search_type]
+ handler_class.new(current_user, selected_value, view_id).execute
+ end
+
+ private
+
+ def current_user
+ ChatNames::FindUserService.new(
+ params.dig(:team, :id),
+ params.dig(:user, :id)
+ ).execute
+ end
+
+ def option?(option)
+ OPTIONS.key?(option)
+ end
+
+ attr_reader :params, :search_type, :selected_value, :view_id
+ end
+end
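
A small usage sketch of the dispatch above, with placeholder IDs; 'assignee' and 'labels' are the only action_id values registered in OPTIONS.

params = {
  action_id: 'assignee',   # routed to SlackOptions::UserSearchHandler
  value: 'jo',             # text the Slack user has typed so far
  team: { id: 'T_PLACEHOLDER' },
  user: { id: 'U_PLACEHOLDER' },
  view: { id: 'V_PLACEHOLDER' }
}

response = Integrations::SlackOptionService.new(params).execute
response.payload # => { options: [...] } ready to return to Slack's external select
# An unregistered action_id raises UnknownOptionError instead.
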
diff --git a/app/services/integrations/slack_options/label_search_handler.rb b/app/services/integrations/slack_options/label_search_handler.rb
new file mode 100644
index 00000000000..4e5c9dcb48a
--- /dev/null
+++ b/app/services/integrations/slack_options/label_search_handler.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Integrations
+ module SlackOptions
+ class LabelSearchHandler # rubocop:disable Search/NamespacedClass
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(current_user, search_value, view_id)
+ @current_user = current_user.user
+ @search_value = search_value
+ @view_id = view_id
+ end
+
+ def execute
+ return ServiceResponse.success(payload: []) unless current_user.can?(:read_label, project)
+
+ labels = LabelsFinder.new(
+ current_user,
+ {
+ project: project,
+ search: search_value
+ }
+ ).execute
+
+ ServiceResponse.success(payload: build_label_list(labels))
+ end
+
+ private
+
+ def project
+ project_id = Integrations::SlackInteractions::IncidentManagement::IncidentModalOpenedService
+ .cache_read(view_id)
+
+ return unless project_id
+
+ Project.find(project_id)
+ end
+ strong_memoize_attr :project
+
+ def build_label_list(labels)
+ return [] unless labels
+
+ label_list = labels.map do |label|
+ {
+ text: {
+ type: "plain_text",
+ text: label.name
+ },
+ value: label.id.to_s
+ }
+ end
+
+ {
+ options: label_list
+ }
+ end
+
+ attr_accessor :current_user, :search_value, :view_id
+ end
+ end
+end
diff --git a/app/services/integrations/slack_options/user_search_handler.rb b/app/services/integrations/slack_options/user_search_handler.rb
new file mode 100644
index 00000000000..b7a400b5ee2
--- /dev/null
+++ b/app/services/integrations/slack_options/user_search_handler.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Integrations
+ module SlackOptions
+ class UserSearchHandler # rubocop:disable Search/NamespacedClass
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(current_user, search_value, view_id)
+ @current_user = current_user.user
+ @search_value = search_value
+ @view_id = view_id
+ end
+
+ def execute
+ return ServiceResponse.success(payload: []) unless current_user.can?(:read_project_member, project)
+
+ members = MembersFinder.new(project, current_user, params: { search: search_value }).execute
+
+ ServiceResponse.success(payload: build_user_list(members))
+ end
+
+ private
+
+ def project
+ project_id = SlackInteractions::IncidentManagement::IncidentModalOpenedService
+ .cache_read(view_id)
+
+ return unless project_id
+
+ Project.find(project_id)
+ end
+ strong_memoize_attr :project
+
+ def build_user_list(members)
+ return [] unless members
+
+ user_list = members.map do |member|
+ {
+ text: {
+ type: "plain_text",
+ text: "#{member.user.name} - #{member.user.username}"
+ },
+ value: member.user.id.to_s
+ }
+ end
+
+ {
+ options: user_list
+ }
+ end
+
+ attr_reader :current_user, :search_value, :view_id
+ end
+ end
+end
diff --git a/app/services/issuable/callbacks/base.rb b/app/services/issuable/callbacks/base.rb
new file mode 100644
index 00000000000..3fabce2c949
--- /dev/null
+++ b/app/services/issuable/callbacks/base.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Issuable
+ module Callbacks
+ class Base
+ include Gitlab::Allowable
+
+ def initialize(issuable:, current_user:, params:)
+ @issuable = issuable
+ @current_user = current_user
+ @params = params
+ end
+
+ def after_initialize; end
+ def after_update_commit; end
+ def after_save_commit; end
+
+ private
+
+ attr_reader :issuable, :current_user, :params
+
+ def excluded_in_new_type?
+ params.key?(:excluded_in_new_type) && params[:excluded_in_new_type]
+ end
+
+ def has_permission?(permission)
+ can?(current_user, permission, issuable)
+ end
+ end
+ end
+end
diff --git a/app/services/issuable/callbacks/milestone.rb b/app/services/issuable/callbacks/milestone.rb
new file mode 100644
index 00000000000..7f922c26e07
--- /dev/null
+++ b/app/services/issuable/callbacks/milestone.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module Issuable
+ module Callbacks
+ class Milestone < Base
+ ALLOWED_PARAMS = %i[milestone milestone_id skip_milestone_email].freeze
+
+ def after_initialize
+ params[:milestone_id] = nil if excluded_in_new_type?
+ return unless params.key?(:milestone_id) && has_permission?(:"set_#{issuable.to_ability_name}_metadata")
+
+ @old_milestone = issuable.milestone
+
+ if params[:milestone_id].blank? || params[:milestone_id].to_s == IssuableFinder::Params::NONE
+ issuable.milestone = nil
+
+ return
+ end
+
+ resource_group = issuable.project&.group || issuable.try(:namespace)
+ project_ids = [issuable.project&.id].compact
+
+ milestone = MilestonesFinder.new({
+ project_ids: project_ids,
+ group_ids: resource_group&.self_and_ancestors&.select(:id),
+ ids: [params[:milestone_id]]
+ }).execute.first
+
+ issuable.milestone = milestone if milestone
+ end
+
+ def after_update_commit
+ return unless issuable.previous_changes.include?('milestone_id')
+
+ update_usage_data_counters
+ send_milestone_change_notification
+
+ GraphqlTriggers.issuable_milestone_updated(issuable)
+ end
+
+ def after_save_commit
+ return unless issuable.previous_changes.include?('milestone_id')
+
+ invalidate_milestone_counters
+ end
+
+ private
+
+ def invalidate_milestone_counters
+ [@old_milestone, issuable.milestone].compact.each do |milestone|
+ case issuable
+ when Issue
+ ::Milestones::ClosedIssuesCountService.new(milestone).delete_cache
+ ::Milestones::IssuesCountService.new(milestone).delete_cache
+ when MergeRequest
+ ::Milestones::MergeRequestsCountService.new(milestone).delete_cache
+ end
+ end
+ end
+
+ def update_usage_data_counters
+ return unless issuable.is_a?(MergeRequest)
+
+ Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter
+ .track_milestone_changed_action(user: current_user)
+ end
+
+ def send_milestone_change_notification
+ return if params[:skip_milestone_email]
+
+ notification_service = NotificationService.new.async
+
+ if issuable.milestone.nil?
+ notification_service.removed_milestone(issuable, current_user)
+ else
+ notification_service.changed_milestone(issuable, issuable.milestone, current_user)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/issuable/clone/base_service.rb b/app/services/issuable/clone/base_service.rb
index 02beaaf5d83..a4e815e70fc 100644
--- a/app/services/issuable/clone/base_service.rb
+++ b/app/services/issuable/clone/base_service.rb
@@ -7,11 +7,6 @@ module Issuable
alias_method :old_project, :project
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
def execute(original_entity, target_parent)
@original_entity = original_entity
@target_parent = target_parent
diff --git a/app/services/issuable/destroy_service.rb b/app/services/issuable/destroy_service.rb
index 4c3e518d62b..261afb767bb 100644
--- a/app/services/issuable/destroy_service.rb
+++ b/app/services/issuable/destroy_service.rb
@@ -4,7 +4,7 @@ module Issuable
class DestroyService < IssuableBaseService
# TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
+ super(container: container, current_user: current_user, params: params)
end
def execute(issuable)
diff --git a/app/services/issuable/import_csv/base_service.rb b/app/services/issuable/import_csv/base_service.rb
index 83cf5a67453..9ef9fb76e3c 100644
--- a/app/services/issuable/import_csv/base_service.rb
+++ b/app/services/issuable/import_csv/base_service.rb
@@ -21,7 +21,7 @@ module Issuable
headers.downcase! if headers
return if headers && headers.include?('title') && headers.include?('description')
- raise CSV::MalformedCSVError
+ raise CSV::MalformedCSVError.new('Invalid CSV format - missing required headers.', 1)
end
end
end
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 911d04d6b7a..e9312bd6b31 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -1,8 +1,33 @@
# frozen_string_literal: true
-class IssuableBaseService < ::BaseProjectService
+class IssuableBaseService < ::BaseContainerService
private
+ def available_callbacks
+ [
+ Issuable::Callbacks::Milestone
+ ].freeze
+ end
+
+ def initialize_callbacks!(issuable)
+ @callbacks = available_callbacks.filter_map do |callback_class|
+ callback_params = params.slice(*callback_class::ALLOWED_PARAMS)
+
+ next if callback_params.empty?
+
+ callback_class.new(issuable: issuable, current_user: current_user, params: callback_params)
+ end
+
+ remove_callback_params
+ @callbacks.each(&:after_initialize)
+ end
+
+ def remove_callback_params
+ available_callbacks.each do |callback_class|
+ callback_class::ALLOWED_PARAMS.each { |p| params.delete(p) }
+ end
+ end
+
def self.constructor_container_arg(value)
# TODO: Dynamically determining the type of a constructor arg based on the class is an antipattern,
# but the root cause is that Epics::BaseService has some issues that inheritance may not be the
@@ -10,15 +35,15 @@ class IssuableBaseService < ::BaseProjectService
# Follow on issue to address this:
# https://gitlab.com/gitlab-org/gitlab/-/issues/328438
- { project: value }
+ { container: value }
end
- attr_accessor :params, :skip_milestone_email
+ attr_accessor :params
- def initialize(project:, current_user: nil, params: {})
- super
-
- @skip_milestone_email = @params.delete(:skip_milestone_email)
+ def initialize(container:, current_user: nil, params: {})
+ # we need to exclude project params since they may come from external requests. project should always
+ # be passed as part of the service's initializer
+ super(container: container, current_user: current_user, params: params.except(:project, :project_id))
end
def can_admin_issuable?(issuable)
@@ -34,10 +59,7 @@ class IssuableBaseService < ::BaseProjectService
end
def filter_params(issuable)
- params.delete(:milestone)
-
unless can_set_issuable_metadata?(issuable)
- params.delete(:milestone_id)
params.delete(:labels)
params.delete(:add_label_ids)
params.delete(:add_labels)
@@ -61,7 +83,6 @@ class IssuableBaseService < ::BaseProjectService
params.delete(:remove_contacts) unless can?(current_user, :set_issue_crm_contacts, issuable)
filter_assignees(issuable)
- filter_milestone
filter_labels
filter_severity(issuable)
filter_escalation_status(issuable)
@@ -102,19 +123,6 @@ class IssuableBaseService < ::BaseProjectService
can?(user, ability_name, resource)
end
- def filter_milestone
- milestone_id = params[:milestone_id]
- return unless milestone_id
-
- params[:milestone_id] = '' if milestone_id == IssuableFinder::Params::NONE
- groups = project.group&.self_and_ancestors&.select(:id)
-
- milestone =
- Milestone.for_projects_and_groups([project.id], groups).find_by_id(milestone_id)
-
- params[:milestone_id] = '' unless milestone
- end
-
def filter_labels
label_ids_to_filter(:add_label_ids, :add_labels, false)
label_ids_to_filter(:remove_label_ids, :remove_labels, true)
@@ -206,6 +214,8 @@ class IssuableBaseService < ::BaseProjectService
end
def create(issuable, skip_system_notes: false)
+ initialize_callbacks!(issuable)
+
handle_quick_actions(issuable)
filter_params(issuable)
@@ -229,6 +239,8 @@ class IssuableBaseService < ::BaseProjectService
end
if issuable_saved
+ @callbacks.each(&:after_save_commit)
+
create_system_notes(issuable, is_update: false) unless skip_system_notes
handle_changes(issuable, { params: params })
@@ -278,19 +290,22 @@ class IssuableBaseService < ::BaseProjectService
end
def update(issuable)
+ old_associations = associations_before_update(issuable)
+
+ initialize_callbacks!(issuable)
+
prepare_update_params(issuable)
handle_quick_actions(issuable)
filter_params(issuable)
change_additional_attributes(issuable)
- old_associations = associations_before_update(issuable)
assign_requested_labels(issuable)
assign_requested_assignees(issuable)
assign_requested_crm_contacts(issuable)
widget_params = filter_widget_params
- if issuable.changed? || params.present? || widget_params.present?
+ if issuable.changed? || params.present? || widget_params.present? || @callbacks.present?
issuable.assign_attributes(allowed_update_params(params))
if issuable.description_changed?
@@ -307,13 +322,15 @@ class IssuableBaseService < ::BaseProjectService
# We have to perform this check before saving the issuable as Rails resets
# the changed fields upon calling #save.
update_project_counters = issuable.project && update_project_counter_caches?(issuable)
- ensure_milestone_available(issuable)
issuable_saved = issuable.with_transaction_returning_status do
transaction_update(issuable, { save_with_touch: should_touch })
end
if issuable_saved
+ @callbacks.each(&:after_update_commit)
+ @callbacks.each(&:after_save_commit)
+
create_system_notes(
issuable, old_labels: old_associations[:labels], old_milestone: old_associations[:milestone]
)
@@ -584,14 +601,6 @@ class IssuableBaseService < ::BaseProjectService
project
end
- # we need to check this because milestone from milestone_id param is displayed on "new" page
- # where private project milestone could leak without this check
- def ensure_milestone_available(issuable)
- return unless issuable.supports_milestone? && issuable.milestone_id.present?
-
- issuable.milestone_id = nil unless issuable.milestone_available?
- end
-
def update_timestamp?(issuable)
issuable.changes.keys != ["relative_position"]
end
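
To make the new callback flow easier to follow, here is a condensed sketch of what initialize_callbacks! and the commit hooks above do for the single registered callback. The issue, user, and params objects are placeholders; the class and constant names come from this change.

params = { milestone_id: 42, title: 'Updated title' }

callback = Issuable::Callbacks::Milestone.new(
  issuable: issue,
  current_user: user,
  params: params.slice(*Issuable::Callbacks::Milestone::ALLOWED_PARAMS)
)

callback.after_initialize     # assigns (or clears) issue.milestone before the save
issue.save                    # the service saves inside a transaction
callback.after_update_commit  # notifications, usage counters, GraphQL trigger when milestone_id changed
callback.after_save_commit    # invalidates the milestone issue-count caches
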
diff --git a/app/services/issuable_links/create_service.rb b/app/services/issuable_links/create_service.rb
index f244f54b25f..1069c9e0915 100644
--- a/app/services/issuable_links/create_service.rb
+++ b/app/services/issuable_links/create_service.rb
@@ -19,6 +19,10 @@ module IssuableLinks
return error(issuables_already_assigned_message, 409)
end
+ if render_no_permission_error?
+ return error(issuables_no_permission_error_message, 403)
+ end
+
if render_not_found_error?
return error(issuables_not_found_message, 404)
end
@@ -46,6 +50,7 @@ module IssuableLinks
link
end
+
# rubocop: enable CodeReuse/ActiveRecord
private
@@ -54,6 +59,10 @@ module IssuableLinks
referenced_issuables.present? && (referenced_issuables - previous_related_issuables).empty?
end
+ def render_no_permission_error?
+ readonly_issuables(referenced_issuables).present? && linkable_issuables(referenced_issuables).empty?
+ end
+
def render_not_found_error?
linkable_issuables(referenced_issuables).empty?
end
@@ -116,6 +125,10 @@ module IssuableLinks
_('%{issuable}(s) already assigned' % { issuable: target_issuable_type.capitalize })
end
+ def issuables_no_permission_error_message
+ _("Couldn't link %{issuable}. You must have at least the Reporter role in both projects." % { issuable: target_issuable_type })
+ end
+
def issuables_not_found_message
_('No matching %{issuable} found. Make sure that you are adding a valid %{issuable} URL.' % { issuable: target_issuable_type })
end
@@ -133,6 +146,10 @@ module IssuableLinks
raise NotImplementedError
end
+ def readonly_issuables(_issuables)
+ [] # default to empty for non-issues
+ end
+
def previous_related_issuables
raise NotImplementedError
end
diff --git a/app/services/issue_links/create_service.rb b/app/services/issue_links/create_service.rb
index 80c6af88f21..db05920678e 100644
--- a/app/services/issue_links/create_service.rb
+++ b/app/services/issue_links/create_service.rb
@@ -14,6 +14,10 @@ module IssueLinks
private
+ def readonly_issuables(issuables)
+ @readonly_issuables ||= issuables.select { |issuable| issuable.readable_by?(current_user) }
+ end
+
def track_event
track_incident_action(current_user, issuable, :incident_relate)
end
diff --git a/app/services/issues/after_create_service.rb b/app/services/issues/after_create_service.rb
index 011a78029c8..e996724ebd6 100644
--- a/app/services/issues/after_create_service.rb
+++ b/app/services/issues/after_create_service.rb
@@ -2,14 +2,8 @@
module Issues
class AfterCreateService < Issues::BaseService
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
def execute(issue)
todo_service.new_issue(issue, current_user)
- delete_milestone_total_issue_counter_cache(issue.milestone)
track_incident_action(current_user, issue, :incident_created)
end
end
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 553fb6e2ac9..efe42fb29d5 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -33,6 +33,14 @@ module Issues
private
+ # overriding this because IssuableBaseService#constructor_container_arg returns { project: value }
+ # Issue services now take a container instead of a project in their constructors, and
+ # IssuableBaseService#change_state dynamically picks one of `Issues::ReopenService`, `Epics::ReopenService` or
+ # MergeRequests::ReopenService, so we need this method to return { container: value } for Issues::ReopenService
+ def self.constructor_container_arg(value)
+ { container: value }
+ end
+
def find_work_item_type_id(issue_type)
work_item_type = WorkItems::Type.default_by_type(issue_type)
work_item_type ||= WorkItems::Type.default_issue_type
@@ -45,6 +53,10 @@ module Issues
params.delete(:issue_type) unless create_issue_type_allowed?(issue, params[:issue_type])
+ if params[:work_item_type].present? && !create_issue_type_allowed?(project, params[:work_item_type].base_type)
+ params.delete(:work_item_type)
+ end
+
moved_issue = params.delete(:moved_issue)
# Setting created_at, updated_at and iid is allowed only for admins and owners or
@@ -95,10 +107,10 @@ module Issues
def execute_hooks(issue, action = 'open', old_associations: {})
issue_data = Gitlab::Lazy.new { hook_data(issue, action, old_associations: old_associations) }
hooks_scope = issue.confidential? ? :confidential_issue_hooks : :issue_hooks
- issue.project.execute_hooks(issue_data, hooks_scope)
- issue.project.execute_integrations(issue_data, hooks_scope)
+ issue.namespace.execute_hooks(issue_data, hooks_scope)
+ issue.namespace.execute_integrations(issue_data, hooks_scope)
- execute_incident_hooks(issue, issue_data) if issue.incident?
+ execute_incident_hooks(issue, issue_data) if issue.work_item_type&.incident?
end
# We can remove this code after proposal in
@@ -106,29 +118,12 @@ module Issues
def execute_incident_hooks(issue, issue_data)
issue_data[:object_kind] = 'incident'
issue_data[:event_type] = 'incident'
- issue.project.execute_integrations(issue_data, :incident_hooks)
+ issue.namespace.execute_integrations(issue_data, :incident_hooks)
end
def update_project_counter_caches?(issue)
super || issue.confidential_changed?
end
-
- def delete_milestone_closed_issue_counter_cache(milestone)
- return unless milestone
-
- Milestones::ClosedIssuesCountService.new(milestone).delete_cache
- end
-
- def delete_milestone_total_issue_counter_cache(milestone)
- return unless milestone
-
- Milestones::IssuesCountService.new(milestone).delete_cache
- end
-
- override :allowed_create_params
- def allowed_create_params(params)
- super(params).except(:work_item_type_id, :work_item_type)
- end
end
end
diff --git a/app/services/issues/build_service.rb b/app/services/issues/build_service.rb
index 877ce09e065..a65fc0c7c87 100644
--- a/app/services/issues/build_service.rb
+++ b/app/services/issues/build_service.rb
@@ -4,16 +4,21 @@ module Issues
class BuildService < Issues::BaseService
include ResolveDiscussions
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
- def execute
+ def execute(initialize_callbacks: true)
filter_resolve_discussion_params
- @issue = model_klass.new(issue_params.merge(project: project)).tap do |issue|
- ensure_milestone_available(issue)
+ container_param = case container
+ when Project
+ { project: project }
+ when Namespaces::ProjectNamespace
+ { project: container.project }
+ else
+ { namespace: container }
+ end
+
+ @issue = model_klass.new(issue_params.merge(container_param)).tap do |issue|
+ set_work_item_type(issue)
+ initialize_callbacks!(issue) if initialize_callbacks
end
end
@@ -66,22 +71,32 @@ module Issues
def issue_params
@issue_params ||= build_issue_params
+ end
+
+ private
+
+ def set_work_item_type(issue)
+ work_item_type = if params[:work_item_type_id].present?
+ params.delete(:work_item_type)
+ WorkItems::Type.find_by(id: params.delete(:work_item_type_id)) # rubocop: disable CodeReuse/ActiveRecord
+ else
+ params.delete(:work_item_type)
+ end
+
+ base_type = work_item_type&.base_type
- if @issue_params[:work_item_type].present?
- @issue_params[:issue_type] = @issue_params[:work_item_type].base_type
+ if create_issue_type_allowed?(container, base_type)
+ issue.work_item_type = work_item_type
+ # Up to this point issue_type might be set to the default, so we need to sync if a work item type is provided
+ issue.issue_type = base_type
else
- # If :issue_type is nil then params[:issue_type] was either nil
- # or not permitted. Either way, the :issue_type will default
- # to the column default of `issue`. And that means we need to
- # ensure the work_item_type_id is set
- @issue_params[:work_item_type_id] = get_work_item_type_id(@issue_params[:issue_type])
+ # If no work item type was provided, or the provided type is not allowed, we fall back to
+ # issue_type, which includes the column default
+ issue_type = issue_params[:issue_type] || ::Issue::DEFAULT_ISSUE_TYPE
+ issue.work_item_type = WorkItems::Type.default_by_type(issue_type)
end
-
- @issue_params
end
- private
-
def model_klass
::Issue
end
@@ -94,11 +109,7 @@ module Issues
:confidential
]
- params[:work_item_type] = WorkItems::Type.find_by(id: params[:work_item_type_id]) if params[:work_item_type_id].present? # rubocop: disable CodeReuse/ActiveRecord
-
- public_issue_params << :milestone_id if can?(current_user, :admin_issue, project)
- public_issue_params << :issue_type if create_issue_type_allowed?(project, params[:issue_type])
- public_issue_params << :work_item_type if create_issue_type_allowed?(project, params[:work_item_type]&.base_type)
+ public_issue_params << :issue_type if create_issue_type_allowed?(container, params[:issue_type])
params.slice(*public_issue_params)
end
@@ -109,10 +120,6 @@ module Issues
.merge(public_params)
.with_indifferent_access
end
-
- def get_work_item_type_id(issue_type = :issue)
- find_work_item_type_id(issue_type)
- end
end
end
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index 9fde1cc2ac2..e45033f2b91 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -2,11 +2,6 @@
module Issues
class CloseService < Issues::BaseService
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
# Closes the supplied issue if the current user is able to do so.
def execute(issue, commit: nil, notifications: true, system_note: true, skip_authorization: false)
return issue unless can_close?(issue, skip_authorization: skip_authorization)
@@ -48,7 +43,7 @@ module Issues
Onboarding::ProgressService.new(project.namespace).execute(action: :issue_auto_closed)
end
- delete_milestone_closed_issue_counter_cache(issue.milestone)
+ Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone
end
issue
@@ -56,11 +51,6 @@ module Issues
private
- # TODO: remove once MergeRequests::CloseService or IssuableBaseService method is changed.
- def self.constructor_container_arg(value)
- { container: value }
- end
-
def can_close?(issue, skip_authorization: false)
skip_authorization || can?(current_user, :update_issue, issue) || issue.is_a?(ExternalIssue)
end
@@ -103,7 +93,7 @@ module Issues
end
def resolve_incident(issue)
- return unless issue.incident?
+ return unless issue.work_item_type&.incident?
status = issue.incident_management_issuable_escalation_status || issue.build_incident_management_issuable_escalation_status
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index fa5233da489..ba8f00d03d4 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -15,18 +15,22 @@ module Issues
# SpamParams constructor are not otherwise available, spam_params: must be explicitly passed as nil.
def initialize(container:, spam_params:, current_user: nil, params: {}, build_service: nil)
@extra_params = params.delete(:extra_params) || {}
- super(project: container, current_user: current_user, params: params)
+ super(container: container, current_user: current_user, params: params)
@spam_params = spam_params
- @build_service = build_service || BuildService.new(container: project, current_user: current_user, params: params)
+ @build_service = build_service ||
+ BuildService.new(container: project, current_user: current_user, params: params)
end
def execute(skip_system_notes: false)
- return error(_('Operation not allowed'), 403) unless @current_user.can?(authorization_action, @project)
+ return error(_('Operation not allowed'), 403) unless @current_user.can?(authorization_action, container)
- @issue = @build_service.execute
- # issue_type is set in BuildService, so we can delete it from params, in later phase
- # it can be set also from quick actions - in that case work_item_id is synced later again
- params.delete(:issue_type)
+ # We should not initialize the callback classes during the build service execution because these will be
+ # initialized when we call #create below
+ @issue = @build_service.execute(initialize_callbacks: false)
+
+ # issue_type and work_item_type are set in BuildService, so we can delete them from params; in a later
+ # phase they can also be set from quick actions
+ [:issue_type, :work_item_type, :work_item_type_id].each { |attribute| params.delete(attribute) }
handle_move_between_ids(@issue)
@@ -59,7 +63,8 @@ module Issues
issue.run_after_commit do
NewIssueWorker.perform_async(issue.id, user.id, issue.class.to_s)
Issues::PlacementWorker.perform_async(nil, issue.project_id)
- Onboarding::IssueCreatedWorker.perform_async(issue.project.namespace_id)
+ # issue.namespace_id can point either to a project (through its project namespace) or to a group.
+ Onboarding::IssueCreatedWorker.perform_async(issue.namespace_id)
end
end
@@ -71,7 +76,6 @@ module Issues
handle_escalation_status_change(issue)
create_timeline_event(issue)
try_to_associate_contacts(issue)
- change_additional_attributes(issue)
super
end
@@ -88,6 +92,7 @@ module Issues
return if issue.assignees == old_assignees
create_assignee_note(issue, old_assignees)
+ Gitlab::ResourceEvents::AssignmentEventRecorder.new(parent: issue, old_assignees: old_assignees).record
end
def resolve_discussions_with_issue(issue)
@@ -100,18 +105,6 @@ module Issues
private
- def self.constructor_container_arg(value)
- { container: value }
- end
-
- def handle_quick_actions(issue)
- # Do not handle quick actions unless the work item is the default Issue.
- # The available quick actions for a work item depend on its type and widgets.
- return if @params[:work_item_type].present? && @params[:work_item_type] != WorkItems::Type.default_by_type(:issue)
-
- super
- end
-
def authorization_action
:create_issue
end
@@ -119,7 +112,7 @@ module Issues
attr_reader :spam_params, :extra_params
def create_timeline_event(issue)
- return unless issue.incident?
+ return unless issue.work_item_type&.incident?
IncidentManagement::TimelineEvents::CreateService.create_incident(issue, current_user)
end
@@ -143,15 +136,6 @@ module Issues
set_crm_contacts(issue, contacts)
end
-
- override :change_additional_attributes
- def change_additional_attributes(issue)
- super
-
- # issue_type can be still set through quick actions, in that case
- # we have to make sure to re-sync work_item_type with it
- issue.work_item_type_id = find_work_item_type_id(params[:issue_type]) if params[:issue_type]
- end
end
end
diff --git a/app/services/issues/duplicate_service.rb b/app/services/issues/duplicate_service.rb
index a3213c50f86..1fff9a4a684 100644
--- a/app/services/issues/duplicate_service.rb
+++ b/app/services/issues/duplicate_service.rb
@@ -2,11 +2,6 @@
module Issues
class DuplicateService < Issues::BaseService
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
def execute(duplicate_issue, canonical_issue)
return if canonical_issue == duplicate_issue
return unless can?(current_user, :update_issue, duplicate_issue)
diff --git a/app/services/issues/export_csv_service.rb b/app/services/issues/export_csv_service.rb
index d7c1ea276de..9e524d90505 100644
--- a/app/services/issues/export_csv_service.rb
+++ b/app/services/issues/export_csv_service.rb
@@ -18,7 +18,7 @@ module Issues
private
def associations_to_preload
- [:author, :assignees, :timelogs, :milestone, { project: { namespace: :route } }]
+ [:work_item_type, :author, :assignees, :timelogs, :milestone, { project: { namespace: :route } }]
end
def header_to_value_hash
diff --git a/app/services/issues/referenced_merge_requests_service.rb b/app/services/issues/referenced_merge_requests_service.rb
index ba03927136a..ff7cf65e757 100644
--- a/app/services/issues/referenced_merge_requests_service.rb
+++ b/app/services/issues/referenced_merge_requests_service.rb
@@ -2,19 +2,15 @@
module Issues
class ReferencedMergeRequestsService < Issues::BaseService
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
# rubocop: disable CodeReuse/ActiveRecord
def execute(issue)
referenced = referenced_merge_requests(issue)
closed_by = closed_by_merge_requests(issue)
- preloader = ActiveRecord::Associations::Preloader.new
- preloader.preload(referenced + closed_by,
- head_pipeline: { project: [:route, { namespace: :route }] })
+ ActiveRecord::Associations::Preloader.new(
+ records: referenced + closed_by,
+ associations: { head_pipeline: { project: [:route, { namespace: :route }] } }
+ ).call
[sort_by_iid(referenced), sort_by_iid(closed_by)]
end
diff --git a/app/services/issues/related_branches_service.rb b/app/services/issues/related_branches_service.rb
index 3f4413fdfd7..ef6de83fcf4 100644
--- a/app/services/issues/related_branches_service.rb
+++ b/app/services/issues/related_branches_service.rb
@@ -4,11 +4,6 @@
# those with a merge request open referencing the current issue.
module Issues
class RelatedBranchesService < Issues::BaseService
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
def execute(issue)
branch_names_with_mrs = branches_with_merge_request_for(issue)
branches = branches_with_iid_of(issue).reject { |b| branch_names_with_mrs.include?(b[:name]) }
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index ebcf2fb5c83..f4d229ecec7 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -2,11 +2,6 @@
module Issues
class ReopenService < Issues::BaseService
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
def execute(issue, skip_authorization: false)
return issue unless can_reopen?(issue, skip_authorization: skip_authorization)
@@ -18,7 +13,7 @@ module Issues
execute_hooks(issue, 'reopen')
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
- delete_milestone_closed_issue_counter_cache(issue.milestone)
+ Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone
track_incident_action(current_user, issue, :incident_reopened)
end
@@ -27,20 +22,12 @@ module Issues
private
- # overriding this because IssuableBaseService#constructor_container_arg returns { project: value }
- # Issues::ReopenService constructor signature is different now, it takes container instead of project also
- # IssuableBaseService#change_state dynamically picks one of the `Issues::ReopenService`, `Epics::ReopenService` or
- # MergeRequests::ReopenService, so we need this method to return { }container: value } for Issues::ReopenService
- def self.constructor_container_arg(value)
- { container: value }
- end
-
def can_reopen?(issue, skip_authorization: false)
skip_authorization || can?(current_user, :reopen_issue, issue)
end
def perform_incident_management_actions(issue)
- return unless issue.incident?
+ return unless issue.work_item_type&.incident?
create_timeline_event(issue)
end
diff --git a/app/services/issues/reorder_service.rb b/app/services/issues/reorder_service.rb
index 059b4196b23..1afec4c94f4 100644
--- a/app/services/issues/reorder_service.rb
+++ b/app/services/issues/reorder_service.rb
@@ -4,11 +4,6 @@ module Issues
class ReorderService < Issues::BaseService
include Gitlab::Utils::StrongMemoize
- # TODO: this is to be removed once we get to rename the IssuableBaseService project param to container
- def initialize(container:, current_user: nil, params: {})
- super(project: container, current_user: current_user, params: params)
- end
-
def execute(issue)
return false unless can?(current_user, :update_issue, issue)
return false unless move_between_ids
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index 71324b3f044..201bf19b535 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -6,7 +6,7 @@ module Issues
# necessary in many cases, and we don't want to require every caller to explicitly pass it as nil
# to disable spam checking.
def initialize(container:, current_user: nil, params: {}, spam_params: nil)
- super(project: container, current_user: current_user, params: params)
+ super(container: container, current_user: current_user, params: params)
@spam_params = spam_params
end
@@ -39,7 +39,8 @@ module Issues
def change_work_item_type(issue)
return unless issue.changed_attributes['issue_type']
- type_id = find_work_item_type_id(issue.issue_type)
+ issue_type = params[:issue_type] || ::Issue::DEFAULT_ISSUE_TYPE
+ type_id = find_work_item_type_id(issue_type)
issue.work_item_type_id = type_id
end
@@ -64,7 +65,6 @@ module Issues
handle_assignee_changes(issue, old_assignees)
handle_confidential_change(issue)
handle_added_labels(issue, old_labels)
- handle_milestone_change(issue)
handle_added_mentions(issue, old_mentioned_users)
handle_severity_change(issue, old_severity)
handle_escalation_status_change(issue)
@@ -76,6 +76,7 @@ module Issues
return if issue.assignees == old_assignees
create_assignee_note(issue, old_assignees)
+ Gitlab::ResourceEvents::AssignmentEventRecorder.new(parent: issue, old_assignees: old_assignees).record
notification_service.async.reassigned_issue(issue, current_user, old_assignees)
todo_service.reassigned_assignable(issue, current_user, old_assignees)
track_incident_action(current_user, issue, :incident_assigned)
@@ -116,23 +117,6 @@ module Issues
attr_reader :spam_params
- # TODO: remove this once MergeRequests::UpdateService#initialize is changed to take container as named argument.
- #
- # Issues::UpdateService is used together with MergeRequests::UpdateService in Mutations::Assignable#assign! method
- # however MergeRequests::UpdateService#initialize still takes `project` as param and Issues::UpdateService is being
- # changed to take `container` as param. So we are adding this workaround in the meantime.
- def self.constructor_container_arg(value)
- { container: value }
- end
-
- def handle_quick_actions(issue)
- # Do not handle quick actions unless the work item is the default Issue.
- # The available quick actions for a work item depend on its type and widgets.
- return unless issue.work_item_type.default_issue?
-
- super
- end
-
def handle_date_changes(issue)
return unless issue.previous_changes.slice('due_date', 'start_date').any?
@@ -175,35 +159,6 @@ module Issues
end
end
- def handle_milestone_change(issue)
- return unless issue.previous_changes.include?('milestone_id')
-
- invalidate_milestone_issue_counters(issue)
- send_milestone_change_notification(issue)
- GraphqlTriggers.issuable_milestone_updated(issue)
- end
-
- def invalidate_milestone_issue_counters(issue)
- issue.previous_changes['milestone_id'].each do |milestone_id|
- next unless milestone_id
-
- milestone = Milestone.find_by_id(milestone_id)
-
- delete_milestone_closed_issue_counter_cache(milestone)
- delete_milestone_total_issue_counter_cache(milestone)
- end
- end
-
- def send_milestone_change_notification(issue)
- return if skip_milestone_email
-
- if issue.milestone.nil?
- notification_service.async.removed_milestone(issue, current_user)
- else
- notification_service.async.changed_milestone(issue, issue.milestone, current_user)
- end
- end
-
def handle_added_mentions(issue, old_mentioned_users)
added_mentions = issue.mentioned_users(current_user) - old_mentioned_users
@@ -229,7 +184,7 @@ module Issues
end
def do_handle_issue_type_change(issue)
- SystemNoteService.change_issue_type(issue, current_user)
+ SystemNoteService.change_issue_type(issue, current_user, issue.issue_type_before_last_save)
::IncidentManagement::IssuableEscalationStatuses::CreateService.new(issue).execute if issue.supports_escalation?
end
diff --git a/app/services/issues/zoom_link_service.rb b/app/services/issues/zoom_link_service.rb
index 4144c293990..bfd3e6a945f 100644
--- a/app/services/issues/zoom_link_service.rb
+++ b/app/services/issues/zoom_link_service.rb
@@ -3,7 +3,7 @@
module Issues
class ZoomLinkService < Issues::BaseService
def initialize(container:, current_user:, params:)
- super(project: container, current_user: current_user, params: params)
+ super
@issue = params.fetch(:issue)
@added_meeting = ZoomMeeting.canonical_meeting(@issue)
diff --git a/app/services/jira_connect/sync_service.rb b/app/services/jira_connect/sync_service.rb
index 92255711399..497c282072d 100644
--- a/app/services/jira_connect/sync_service.rb
+++ b/app/services/jira_connect/sync_service.rb
@@ -31,7 +31,9 @@ module JiraConnect
jira_response: response&.to_json
}
- if response && response['errorMessages'].present?
+ has_errors = response && (response['errorMessage'].present? || response['errorMessages'].present?)
+
+ if has_errors
logger.error(message)
else
logger.info(message)
diff --git a/app/services/jira_connect_installations/proxy_lifecycle_event_service.rb b/app/services/jira_connect_installations/proxy_lifecycle_event_service.rb
index d94d9e1324e..9f3b4a37672 100644
--- a/app/services/jira_connect_installations/proxy_lifecycle_event_service.rb
+++ b/app/services/jira_connect_installations/proxy_lifecycle_event_service.rb
@@ -82,9 +82,9 @@ module JiraConnectInstallations
Gitlab::IntegrationsLogger.info(
integration: 'JiraConnect',
message: 'Proxy lifecycle event received error response',
- event_type: event,
- status_code: status_code,
- body: body
+ jira_event_type: event,
+ jira_status_code: status_code,
+ jira_body: body
)
end
end
diff --git a/app/services/keys/last_used_service.rb b/app/services/keys/last_used_service.rb
index daef544bac0..3683c03b7a4 100644
--- a/app/services/keys/last_used_service.rb
+++ b/app/services/keys/last_used_service.rb
@@ -2,7 +2,7 @@
module Keys
class LastUsedService
- TIMEOUT = 1.day.to_i
+ TIMEOUT = 1.day
attr_reader :key
@@ -12,26 +12,24 @@ module Keys
end
def execute
+ return unless update?
+
# We _only_ want to update last_used_at and not also updated_at (which
# would be updated when using #touch).
- key.update_column(:last_used_at, Time.zone.now) if update?
+ key.update_column(:last_used_at, Time.zone.now)
end
- def update?
- return false if ::Gitlab::Database.read_only?
-
- last_used = key.last_used_at
+ def execute_async
+ return unless update?
- return false if last_used && (Time.zone.now - last_used) <= TIMEOUT
-
- !!redis_lease.try_obtain
+ ::SshKeys::UpdateLastUsedAtWorker.perform_async(key.id)
end
- private
+ def update?
+ return false if ::Gitlab::Database.read_only?
- def redis_lease
- Gitlab::ExclusiveLease
- .new("key_update_last_used_at:#{key.id}", timeout: TIMEOUT)
+ last_used = key.last_used_at
+ last_used.blank? || last_used <= TIMEOUT.ago
end
end
end
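
The reworked throttle above now lives entirely in update?: the column is only touched when the database is writable and the key was last used more than TIMEOUT (one day) ago, or never. A tiny illustrative call site follows, assuming the existing single-argument initializer; the key lookup is a placeholder.

key = Key.find_by(id: key_id)            # placeholder lookup
service = Keys::LastUsedService.new(key)

service.execute        # writes last_used_at inline when update? allows it
service.execute_async  # or defers the write to SshKeys::UpdateLastUsedAtWorker
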
diff --git a/app/services/keys/revoke_service.rb b/app/services/keys/revoke_service.rb
index 42ea9ab73be..9684d4e461e 100644
--- a/app/services/keys/revoke_service.rb
+++ b/app/services/keys/revoke_service.rb
@@ -13,8 +13,6 @@ module Keys
private
def unverify_associated_signatures(key)
- return unless Feature.enabled?(:revoke_ssh_signatures)
-
key.ssh_signatures.each_batch do |batch|
batch.update_all(
verification_status: CommitSignatures::SshSignature.verification_statuses[:revoked_key],
diff --git a/app/services/markup/rendering_service.rb b/app/services/markup/rendering_service.rb
index cd89c170efa..104bdb6dd41 100644
--- a/app/services/markup/rendering_service.rb
+++ b/app/services/markup/rendering_service.rb
@@ -52,6 +52,8 @@ module Markup
def other_markup_unsafe
Gitlab::OtherMarkup.render(file_name, text, context)
+ rescue GitHub::Markup::CommandError
+ ActionController::Base.helpers.simple_format(text)
end
def postprocess(html)
diff --git a/app/services/mattermost/create_team_service.rb b/app/services/mattermost/create_team_service.rb
index 9f6efab1e43..dc448cbc5eb 100644
--- a/app/services/mattermost/create_team_service.rb
+++ b/app/services/mattermost/create_team_service.rb
@@ -9,7 +9,7 @@ module Mattermost
def execute
# The user that creates the team will be Team Admin
- ::Mattermost::Team.new(current_user).create(@group.mattermost_team_params)
+ ::Mattermost::Team.new(current_user).create(**@group.mattermost_team_params)
rescue ::Mattermost::ClientError => e
@group.errors.add(:mattermost_team, e.message)
end
diff --git a/app/services/members/approve_access_request_service.rb b/app/services/members/approve_access_request_service.rb
index 20f96ac2949..f8c91fbae7d 100644
--- a/app/services/members/approve_access_request_service.rb
+++ b/app/services/members/approve_access_request_service.rb
@@ -18,7 +18,7 @@ module Members
def after_execute(member:, skip_log_audit_event:)
super
- resolve_access_request_todos(current_user, member)
+ resolve_access_request_todos(member)
end
def validate_access!(access_requester)
diff --git a/app/services/members/base_service.rb b/app/services/members/base_service.rb
index 801f77ae082..80fba33b20e 100644
--- a/app/services/members/base_service.rb
+++ b/app/services/members/base_service.rb
@@ -53,8 +53,8 @@ module Members
end
end
- def resolve_access_request_todos(current_user, requester)
- todo_service.resolve_access_request_todos(current_user, requester)
+ def resolve_access_request_todos(member)
+ todo_service.resolve_access_request_todos(member)
end
def enqueue_delete_todos(member)
diff --git a/app/services/members/creator_service.rb b/app/services/members/creator_service.rb
index 3ce8390d07d..699c5b94c53 100644
--- a/app/services/members/creator_service.rb
+++ b/app/services/members/creator_service.rb
@@ -13,8 +13,29 @@ module Members
Gitlab::Access.sym_options_with_owner
end
- def add_members( # rubocop:disable Metrics/ParameterLists
- source,
+ # Add members to sources with passed access option
+ #
+ # access can be an integer representing an access level
+ # or a symbol like :maintainer representing a role
+ #
+ # Ex.
+ # add_members(
+ # sources,
+ # user_ids,
+ # Member::MAINTAINER
+ # )
+ #
+ # add_members(
+ # sources,
+ # user_ids,
+ # :maintainer
+ # )
+ #
+ # @param sources [Group, Project, Array<Group>, Array<Project>, Group::ActiveRecord_Relation,
+ # Project::ActiveRecord_Relation] - Can't be an array of source ids because we don't know the type of source.
+ # @return Array<Member>
+ def add_members(
+ sources,
invitees,
access_level,
current_user: nil,
@@ -22,52 +43,58 @@ module Members
tasks_to_be_done: [],
tasks_project_id: nil,
ldap: nil
- )
+ ) # rubocop:disable Metrics/ParameterLists
return [] unless invitees.present?
- # If this user is attempting to manage Owner members and doesn't have permission, do not allow
- return [] if managing_owners?(current_user, access_level) && cannot_manage_owners?(source, current_user)
-
- emails, users, existing_members = parse_users_list(source, invitees)
+ sources = Array.wrap(sources) if sources.is_a?(ApplicationRecord) # For single source
Member.transaction do
- common_arguments = {
- source: source,
- access_level: access_level,
- existing_members: existing_members,
- current_user: current_user,
- expires_at: expires_at,
- tasks_to_be_done: tasks_to_be_done,
- tasks_project_id: tasks_project_id,
- ldap: ldap
- }
-
- members = emails.map do |email|
- new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute
- end
+ sources.flat_map do |source|
+ # If this user is attempting to manage Owner members and doesn't have permission, do not allow
+ next [] if managing_owners?(current_user, access_level) && cannot_manage_owners?(source, current_user)
+
+ emails, users, existing_members = parse_users_list(source, invitees)
+
+ common_arguments = {
+ source: source,
+ access_level: access_level,
+ existing_members: existing_members,
+ current_user: current_user,
+ expires_at: expires_at,
+ tasks_to_be_done: tasks_to_be_done,
+ tasks_project_id: tasks_project_id,
+ ldap: ldap
+ }
+
+ members = emails.map do |email|
+ new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute
+ end
- members += users.map do |user|
- new(invitee: user, **common_arguments).execute
- end
+ members += users.map do |user|
+ new(invitee: user, **common_arguments).execute
+ end
- members
+ members
+ end
end
end
- def add_member( # rubocop:disable Metrics/ParameterLists
+ def add_member(
source,
invitee,
access_level,
current_user: nil,
expires_at: nil,
ldap: nil
- )
- add_members(source,
- [invitee],
- access_level,
- current_user: current_user,
- expires_at: expires_at,
- ldap: ldap).first
+ ) # rubocop:disable Metrics/ParameterLists
+ add_members(
+ source,
+ [invitee],
+ access_level,
+ current_user: current_user,
+ expires_at: expires_at,
+ ldap: ldap
+ ).first
end
private
@@ -217,8 +244,7 @@ module Members
end
def approve_request
- ::Members::ApproveAccessRequestService.new(current_user,
- access_level: access_level)
+ ::Members::ApproveAccessRequestService.new(current_user, access_level: access_level)
.execute(
member,
skip_authorization: ldap || skip_authorization?,
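
The add_members change above accepts either a single source or a collection; Array.wrap keeps the single-source call working. A brief sketch of the multi-source form, assuming the project-level creator subclass (Members::Projects::CreatorService) and placeholder records:

members = Members::Projects::CreatorService.add_members(
  [project_a, project_b],              # one call, several sources
  [user.id, 'new_user@example.com'],   # user IDs and invite emails are both accepted
  :maintainer,
  current_user: acting_owner
)
members.size # => up to 4: one member (or invite) per invitee per project
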
diff --git a/app/services/members/destroy_service.rb b/app/services/members/destroy_service.rb
index dd84b890385..e432016795d 100644
--- a/app/services/members/destroy_service.rb
+++ b/app/services/members/destroy_service.rb
@@ -4,7 +4,15 @@ module Members
class DestroyService < Members::BaseService
include Gitlab::ExclusiveLeaseHelpers
- def execute(member, skip_authorization: false, skip_subresources: false, unassign_issuables: false, destroy_bot: false)
+ def execute(
+ member,
+ skip_authorization: false,
+ skip_subresources: false,
+ unassign_issuables: false,
+ destroy_bot: false,
+ skip_saml_identity: false
+ )
+
unless skip_authorization
raise Gitlab::Access::AccessDeniedError unless authorized?(member, destroy_bot)
@@ -15,18 +23,39 @@ module Members
@skip_auth = skip_authorization
if a_group_owner?(member)
- process_destroy_of_group_owner_member(member, skip_subresources, unassign_issuables)
+ process_destroy_of_group_owner_member(member, skip_subresources, skip_saml_identity)
else
destroy_member(member)
- destroy_data_related_to_member(member, skip_subresources, unassign_issuables)
+ destroy_data_related_to_member(member, skip_subresources, skip_saml_identity)
end
+ enqueue_jobs_that_needs_to_be_run_only_once_per_hierarchy(member, unassign_issuables)
+
member
end
+ # We use this to mark recursive calls made to this service from within the same service.
+ # This lets us run tasks that need to run only once per hierarchy instead of once per recursive call.
+ def mark_as_recursive_call
+ @recursive_call = true
+ end
+
private
- def process_destroy_of_group_owner_member(member, skip_subresources, unassign_issuables)
+ # These actions need to be executed only once per hierarchy because the underlying services
+ # apply these actions to the entire hierarchy anyway, so there is no need to execute them recursively.
+ def enqueue_jobs_that_needs_to_be_run_only_once_per_hierarchy(member, unassign_issuables)
+ return if recursive_call?
+
+ enqueue_delete_todos(member)
+ enqueue_unassign_issuables(member) if unassign_issuables
+ end
+
+ def recursive_call?
+ @recursive_call == true
+ end
+
+ def process_destroy_of_group_owner_member(member, skip_subresources, skip_saml_identity)
# Deleting 2 different group owners via the API in quick succession could lead to
# wrong results for the `last_owner?` check due to race conditions. To prevent this
# we wrap both the last_owner? check and the deletes of owners within a lock.
@@ -40,34 +69,32 @@ module Members
end
# deletion of related data does not have to be within the lock.
- destroy_data_related_to_member(member, skip_subresources, unassign_issuables) unless last_group_owner
+ destroy_data_related_to_member(member, skip_subresources, skip_saml_identity) unless last_group_owner
end
def destroy_member(member)
member.destroy
end
- def destroy_data_related_to_member(member, skip_subresources, unassign_issuables)
+ def destroy_data_related_to_member(member, skip_subresources, skip_saml_identity)
member.user&.invalidate_cache_counts
- delete_member_associations(member, skip_subresources, unassign_issuables)
+ delete_member_associations(member, skip_subresources, skip_saml_identity)
end
def a_group_owner?(member)
member.is_a?(GroupMember) && member.owner?
end
- def delete_member_associations(member, skip_subresources, unassign_issuables)
+ def delete_member_associations(member, skip_subresources, skip_saml_identity)
if member.request? && member.user != current_user
notification_service.decline_access_request(member)
end
delete_subresources(member) unless skip_subresources
delete_project_invitations_by(member) unless skip_subresources
- resolve_access_request_todos(current_user, member)
- enqueue_delete_todos(member)
- enqueue_unassign_issuables(member) if unassign_issuables
+ resolve_access_request_todos(member)
- after_execute(member: member)
+ after_execute(member: member, skip_saml_identity: skip_saml_identity)
end
def authorized?(member, destroy_bot)
@@ -110,13 +137,17 @@ module Members
def destroy_project_members(members)
members.each do |project_member|
- self.class.new(current_user).execute(project_member, skip_authorization: @skip_auth)
+ service = self.class.new(current_user)
+ service.mark_as_recursive_call
+ service.execute(project_member, skip_authorization: @skip_auth)
end
end
def destroy_group_members(members)
members.each do |group_member|
- self.class.new(current_user).execute(group_member, skip_authorization: @skip_auth, skip_subresources: true)
+ service = self.class.new(current_user)
+ service.mark_as_recursive_call
+ service.execute(group_member, skip_authorization: @skip_auth, skip_subresources: true)
end
end
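
The hunk above introduces a recursive-call marker so that hierarchy-wide cleanup (todo deletion, issuable unassignment) is enqueued only by the outermost call. A minimal sketch of the calling pattern, using the names from this diff (the member variables are hypothetical examples):

    # Outermost call: destroys the member and enqueues the once-per-hierarchy jobs.
    Members::DestroyService.new(current_user).execute(member, unassign_issuables: true)

    # Recursive call for a sub-member: marked first, so
    # enqueue_jobs_that_needs_to_be_run_only_once_per_hierarchy returns early.
    service = Members::DestroyService.new(current_user)
    service.mark_as_recursive_call
    service.execute(project_member, skip_authorization: true)
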
diff --git a/app/services/merge_requests/add_context_service.rb b/app/services/merge_requests/add_context_service.rb
index 2ce6073050e..a9ef3e85911 100644
--- a/app/services/merge_requests/add_context_service.rb
+++ b/app/services/merge_requests/add_context_service.rb
@@ -57,7 +57,7 @@ module MergeRequests
def build_context_commit_rows(merge_request_id, commits)
commits.map.with_index do |commit, index|
# generate context commit information for given commit
- commit_hash = commit.to_hash.except(:parent_ids)
+ commit_hash = commit.to_hash.except(:parent_ids, :referenced_by)
sha = Gitlab::Database::ShaAttribute.serialize(commit_hash.delete(:id))
commit_hash.merge(
merge_request_id: merge_request_id,
@@ -75,7 +75,7 @@ module MergeRequests
diff_order = 0
commits.flat_map.with_index do |commit, index|
- commit_hash = commit.to_hash.except(:parent_ids)
+ commit_hash = commit.to_hash.except(:parent_ids, :referenced_by)
sha = Gitlab::Database::ShaAttribute.serialize(commit_hash.delete(:id))
# generate context commit diff information for given commit
diffs = commit.diffs
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index 11251e56ee3..f174778e12e 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -7,6 +7,7 @@ module MergeRequests
def execute(merge_request)
merge_request.ensure_merge_request_diff
+ execute_hooks(merge_request)
prepare_for_mergeability(merge_request)
prepare_merge_request(merge_request)
@@ -39,8 +40,6 @@ module MergeRequests
Gitlab::UsageDataCounters::MergeRequestCounter.count(:create)
link_lfs_objects(merge_request)
-
- delete_milestone_total_merge_requests_counter_cache(merge_request.milestone)
end
def link_lfs_objects(merge_request)
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index f6cbe889128..ec8a17162ca 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -5,6 +5,12 @@ module MergeRequests
extend ::Gitlab::Utils::Override
include MergeRequests::AssignsMergeParams
+ delegate :repository, to: :project
+
+ def initialize(project:, current_user: nil, params: {})
+ super(container: project, current_user: current_user, params: params)
+ end
+
def create_note(merge_request, state = merge_request.state)
SystemNoteService.change_status(merge_request, merge_request.target_project, current_user, state, nil)
end
@@ -20,6 +26,10 @@ module MergeRequests
end
def execute_hooks(merge_request, action = 'open', old_rev: nil, old_associations: {})
+ # NOTE: Due to the async merge request diff generation, we need to skip this for CreateService and execute it in
+ # AfterCreateService instead so that webhook consumers receive the update when the diffs are ready.
+ return if merge_request.skip_ensure_merge_request_diff
+
merge_data = Gitlab::Lazy.new { hook_data(merge_request, action, old_rev: old_rev, old_associations: old_associations) }
merge_request.project.execute_hooks(merge_data, :merge_request_hooks)
merge_request.project.execute_integrations(merge_data, :merge_request_hooks)
@@ -94,6 +104,10 @@ module MergeRequests
private
+ def self.constructor_container_arg(value)
+ { project: value }
+ end
+
def refresh_pipelines_on_merge_requests(merge_request, allow_duplicate: false)
create_pipeline_for(merge_request, current_user, async: true, allow_duplicate: allow_duplicate)
end
@@ -253,12 +267,6 @@ module MergeRequests
merge_request.update(merge_error: message) if save_message_on_model
end
- def delete_milestone_total_merge_requests_counter_cache(milestone)
- return unless milestone
-
- Milestones::MergeRequestsCountService.new(milestone).delete_cache
- end
-
def trigger_merge_request_reviewers_updated(merge_request)
GraphqlTriggers.merge_request_reviewers_updated(merge_request)
end
diff --git a/app/services/merge_requests/build_service.rb b/app/services/merge_requests/build_service.rb
index b9a681f29db..3a7b577d59a 100644
--- a/app/services/merge_requests/build_service.rb
+++ b/app/services/merge_requests/build_service.rb
@@ -16,6 +16,8 @@ module MergeRequests
merge_request.source_project = find_source_project
merge_request.target_project = find_target_project
+ initialize_callbacks!(merge_request)
+
process_params
merge_request.compare_commits = []
@@ -40,17 +42,17 @@ module MergeRequests
attr_accessor :merge_request
delegate :target_branch,
- :target_branch_ref,
- :target_project,
- :source_branch,
- :source_branch_ref,
- :source_project,
- :compare_commits,
- :draft_title,
- :description,
- :first_multiline_commit,
- :errors,
- to: :merge_request
+ :target_branch_ref,
+ :target_project,
+ :source_branch,
+ :source_branch_ref,
+ :source_project,
+ :compare_commits,
+ :draft_title,
+ :description,
+ :first_multiline_commit,
+ :errors,
+ to: :merge_request
def force_remove_source_branch
if params.key?(:force_remove_source_branch)
diff --git a/app/services/merge_requests/create_service.rb b/app/services/merge_requests/create_service.rb
index 75e1adec41b..39e1594d215 100644
--- a/app/services/merge_requests/create_service.rb
+++ b/app/services/merge_requests/create_service.rb
@@ -4,6 +4,7 @@ module MergeRequests
class CreateService < MergeRequests::BaseService
def execute
set_projects!
+ set_default_attributes!
merge_request = MergeRequest.new
merge_request.target_project = @project
@@ -61,6 +62,10 @@ module MergeRequests
raise Gitlab::Access::AccessDeniedError
end
end
+
+ def set_default_attributes!
+ # Implemented in EE
+ end
end
end
diff --git a/app/services/merge_requests/ff_merge_service.rb b/app/services/merge_requests/ff_merge_service.rb
index 6e1d1b6ad23..1a83bbf9de6 100644
--- a/app/services/merge_requests/ff_merge_service.rb
+++ b/app/services/merge_requests/ff_merge_service.rb
@@ -14,10 +14,12 @@ module MergeRequests
override :execute_git_merge
def execute_git_merge
- repository.ff_merge(current_user,
- source,
- merge_request.target_branch,
- merge_request: merge_request)
+ repository.ff_merge(
+ current_user,
+ source,
+ merge_request.target_branch,
+ merge_request: merge_request
+ )
end
override :merge_success_data
diff --git a/app/services/merge_requests/handle_assignees_change_service.rb b/app/services/merge_requests/handle_assignees_change_service.rb
index 51be4690af4..835d56a7070 100644
--- a/app/services/merge_requests/handle_assignees_change_service.rb
+++ b/app/services/merge_requests/handle_assignees_change_service.rb
@@ -15,6 +15,7 @@ module MergeRequests
def execute(merge_request, old_assignees, options = {})
create_assignee_note(merge_request, old_assignees)
notification_service.async.reassigned_merge_request(merge_request, current_user, old_assignees.to_a)
+ Gitlab::ResourceEvents::AssignmentEventRecorder.new(parent: merge_request, old_assignees: old_assignees).record
todo_service.reassigned_assignable(merge_request, current_user, old_assignees)
new_assignees = merge_request.assignees - old_assignees
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index e6b0ffbf716..10301774f96 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -115,8 +115,7 @@ module MergeRequests
def try_merge
execute_git_merge
rescue Gitlab::Git::PreReceiveError => e
- raise MergeError,
- "Something went wrong during merge pre-receive hook. #{e.message}".strip
+ raise MergeError, "Something went wrong during merge pre-receive hook. #{e.message}".strip
rescue StandardError => e
handle_merge_error(log_message: e.message)
raise_error(GENERIC_ERROR_MESSAGE)
@@ -180,9 +179,7 @@ module MergeRequests
end
def log_payload(message)
- Gitlab::ApplicationContext.current
- .merge(merge_request_info: merge_request_info,
- message: message)
+ Gitlab::ApplicationContext.current.merge(merge_request_info: merge_request_info, message: message)
end
def merge_request_info
diff --git a/app/services/merge_requests/merge_to_ref_service.rb b/app/services/merge_requests/merge_to_ref_service.rb
index 8519cbac3cb..1bd26f06e41 100644
--- a/app/services/merge_requests/merge_to_ref_service.rb
+++ b/app/services/merge_requests/merge_to_ref_service.rb
@@ -25,9 +25,7 @@ module MergeRequests
commit = project.commit(commit_id)
target_id, source_id = commit.parent_ids
- success(commit_id: commit.id,
- target_id: target_id,
- source_id: source_id)
+ success(commit_id: commit.id, target_id: target_id, source_id: source_id)
rescue MergeError, ArgumentError => error
error(error.message)
end
diff --git a/app/services/merge_requests/mergeability/detailed_merge_status_service.rb b/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
index d25234183fd..987d6ce8e9f 100644
--- a/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
+++ b/app/services/merge_requests/mergeability/detailed_merge_status_service.rb
@@ -10,6 +10,7 @@ module MergeRequests
end
def execute
+ return :preparing if preparing?
return :checking if checking?
return :unchecked if unchecked?
@@ -31,8 +32,12 @@ module MergeRequests
attr_reader :merge_request, :checks, :ci_check
+ def preparing?
+ merge_request.preparing? && !merge_request.merge_request_diff.persisted?
+ end
+
def checking?
- merge_request.cannot_be_merged_rechecking? || merge_request.preparing? || merge_request.checking?
+ merge_request.cannot_be_merged_rechecking? || merge_request.checking?
end
def unchecked?
diff --git a/app/services/merge_requests/push_options_handler_service.rb b/app/services/merge_requests/push_options_handler_service.rb
index e9abafceb13..1890addf692 100644
--- a/app/services/merge_requests/push_options_handler_service.rb
+++ b/app/services/merge_requests/push_options_handler_service.rb
@@ -4,8 +4,7 @@ module MergeRequests
class PushOptionsHandlerService < ::BaseProjectService
LIMIT = 10
- attr_reader :errors, :changes,
- :push_options, :target_project
+ attr_reader :errors, :changes, :push_options, :target_project
def initialize(project:, current_user:, changes:, push_options:, params: {})
super(project: project, current_user: current_user, params: params)
@@ -112,8 +111,10 @@ module MergeRequests
merge_request = ::MergeRequests::CreateService.new(
project: project,
current_user: current_user,
- params: merge_request.attributes.merge(assignee_ids: merge_request.assignee_ids,
- label_ids: merge_request.label_ids)
+ params: merge_request.attributes.merge(
+ assignee_ids: merge_request.assignee_ids,
+ label_ids: merge_request.label_ids
+ )
).execute
end
diff --git a/app/services/merge_requests/rebase_service.rb b/app/services/merge_requests/rebase_service.rb
index 792f1728b88..6248baea4ea 100644
--- a/app/services/merge_requests/rebase_service.rb
+++ b/app/services/merge_requests/rebase_service.rb
@@ -63,3 +63,5 @@ module MergeRequests
end
end
end
+
+::MergeRequests::RebaseService.prepend_mod
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index 61831a624c7..d6740cdf1ac 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -127,16 +127,23 @@ module MergeRequests
merge_requests_array = merge_requests.to_a + merge_requests_from_forks.to_a
filter_merge_requests(merge_requests_array).each do |merge_request|
+ skip_merge_status_trigger = true
+
if branch_and_project_match?(merge_request) || @push.force_push?
merge_request.reload_diff(current_user)
# Clear existing merge error if the push were directed at the
# source branch. Clearing the error when the target branch
# changes will hide the error from the user.
merge_request.merge_error = nil
+
+ # Don't skip the trigger since we want to update the MR's merge status in real-time
+ # when the push is for the MR's source branch and project.
+ skip_merge_status_trigger = false
elsif merge_request.merge_request_diff.includes_any_commits?(push_commit_ids)
merge_request.reload_diff(current_user)
end
+ merge_request.skip_merge_status_trigger = skip_merge_status_trigger
merge_request.mark_as_unchecked
end
@@ -240,9 +247,11 @@ module MergeRequests
mr_commit_ids.include?(commit.id)
end
- SystemNoteService.add_commits(merge_request, merge_request.project,
- @current_user, new_commits,
- existing_commits, @push.oldrev)
+ SystemNoteService.add_commits(
+ merge_request, merge_request.project,
+ @current_user, new_commits,
+ existing_commits, @push.oldrev
+ )
notification_service.push_to_merge_request(merge_request, @current_user, new_commits: new_commits, existing_commits: existing_commits)
end
diff --git a/app/services/merge_requests/reload_diffs_service.rb b/app/services/merge_requests/reload_diffs_service.rb
index c64b2e99b52..2c6ec9333b2 100644
--- a/app/services/merge_requests/reload_diffs_service.rb
+++ b/app/services/merge_requests/reload_diffs_service.rb
@@ -22,9 +22,11 @@ module MergeRequests
def update_diff_discussion_positions(old_diff_refs)
new_diff_refs = merge_request.diff_refs
- merge_request.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
- new_diff_refs: new_diff_refs,
- current_user: current_user)
+ merge_request.update_diff_discussion_positions(
+ old_diff_refs: old_diff_refs,
+ new_diff_refs: new_diff_refs,
+ current_user: current_user
+ )
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/services/merge_requests/retarget_chain_service.rb b/app/services/merge_requests/retarget_chain_service.rb
index 33aae4184ae..b4b05ffb08c 100644
--- a/app/services/merge_requests/retarget_chain_service.rb
+++ b/app/services/merge_requests/retarget_chain_service.rb
@@ -21,13 +21,14 @@ module MergeRequests
# Update only MRs on projects that we have access to
next unless can?(current_user, :update_merge_request, other_merge_request.source_project)
- ::MergeRequests::UpdateService
- .new(project: other_merge_request.source_project, current_user: current_user,
- params: {
- target_branch: merge_request.target_branch,
- target_branch_was_deleted: true
- })
- .execute(other_merge_request)
+ ::MergeRequests::UpdateService.new(
+ project: other_merge_request.source_project,
+ current_user: current_user,
+ params: {
+ target_branch: merge_request.target_branch,
+ target_branch_was_deleted: true
+ }
+ ).execute(other_merge_request)
end
end
end
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index 255d96f4969..aaed01403cb 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -36,7 +36,6 @@ module MergeRequests
end
handle_target_branch_change(merge_request)
- handle_milestone_change(merge_request)
handle_draft_status_change(merge_request, changed_fields)
track_title_and_desc_edits(changed_fields)
@@ -204,25 +203,6 @@ module MergeRequests
)
end
- def handle_milestone_change(merge_request)
- return if skip_milestone_email
-
- return unless merge_request.previous_changes.include?('milestone_id')
-
- merge_request_activity_counter.track_milestone_changed_action(user: current_user)
-
- previous_milestone = Milestone.find_by_id(merge_request.previous_changes['milestone_id'].first)
- delete_milestone_total_merge_requests_counter_cache(previous_milestone)
-
- if merge_request.milestone.nil?
- notification_service.async.removed_milestone(merge_request, current_user)
- else
- notification_service.async.changed_milestone(merge_request, merge_request.milestone, current_user)
-
- delete_milestone_total_merge_requests_counter_cache(merge_request.milestone)
- end
- end
-
def create_branch_change_note(issuable, branch_type, event_type, old_branch, new_branch)
SystemNoteService.change_branch(
issuable, issuable.project, current_user, branch_type, event_type,
@@ -282,8 +262,6 @@ module MergeRequests
assignees_service.execute(merge_request)
when :spend_time
add_time_spent_service.execute(merge_request)
- else
- nil
end
end
diff --git a/app/services/metrics/dashboard/annotations/create_service.rb b/app/services/metrics/dashboard/annotations/create_service.rb
index b86fa82a5e8..47e9afa36b9 100644
--- a/app/services/metrics/dashboard/annotations/create_service.rb
+++ b/app/services/metrics/dashboard/annotations/create_service.rb
@@ -26,7 +26,7 @@ module Metrics
attr_reader :user, :params
def authorize_environment_access(options)
- if environment.nil? || Ability.allowed?(user, :create_metrics_dashboard_annotation, project)
+ if environment.nil? || Ability.allowed?(user, :admin_metrics_dashboard_annotation, project)
options[:environment] = environment
success(options)
else
@@ -35,7 +35,7 @@ module Metrics
end
def authorize_cluster_access(options)
- if cluster.nil? || Ability.allowed?(user, :create_metrics_dashboard_annotation, cluster)
+ if cluster.nil? || Ability.allowed?(user, :admin_metrics_dashboard_annotation, cluster)
options[:cluster] = cluster
success(options)
else
diff --git a/app/services/metrics/dashboard/annotations/delete_service.rb b/app/services/metrics/dashboard/annotations/delete_service.rb
index 3cb22f8d3da..34918c89304 100644
--- a/app/services/metrics/dashboard/annotations/delete_service.rb
+++ b/app/services/metrics/dashboard/annotations/delete_service.rb
@@ -24,7 +24,7 @@ module Metrics
attr_reader :user, :annotation
def authorize_action(_options)
- if Ability.allowed?(user, :delete_metrics_dashboard_annotation, annotation)
+ if Ability.allowed?(user, :admin_metrics_dashboard_annotation, annotation)
success
else
error(s_('MetricsDashboardAnnotation|You are not authorized to delete this annotation'))
diff --git a/app/services/metrics/dashboard/clone_dashboard_service.rb b/app/services/metrics/dashboard/clone_dashboard_service.rb
index d9bd9423a1b..18623ad336d 100644
--- a/app/services/metrics/dashboard/clone_dashboard_service.rb
+++ b/app/services/metrics/dashboard/clone_dashboard_service.rb
@@ -16,10 +16,6 @@ module Metrics
::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter
].freeze,
- ::Metrics::Dashboard::SelfMonitoringDashboardService::DASHBOARD_PATH => [
- ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter
- ].freeze,
-
::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH => [
::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter
].freeze
diff --git a/app/services/metrics/dashboard/self_monitoring_dashboard_service.rb b/app/services/metrics/dashboard/self_monitoring_dashboard_service.rb
deleted file mode 100644
index 62264281a02..00000000000
--- a/app/services/metrics/dashboard/self_monitoring_dashboard_service.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-# Fetches the self-monitoring metrics dashboard and formats the output.
-# Use Gitlab::Metrics::Dashboard::Finder to retrieve dashboards.
-module Metrics
- module Dashboard
- class SelfMonitoringDashboardService < ::Metrics::Dashboard::PredefinedDashboardService
- DASHBOARD_PATH = 'config/prometheus/self_monitoring_default.yml'
- DASHBOARD_NAME = N_('Overview')
-
- # SHA256 hash of dashboard content
- DASHBOARD_VERSION = '0f7ade2022e09f1a1da8e883cc95d84b9557e1e0e9b015c51eb964296aa73098'
-
- SEQUENCE = [
- STAGES::CustomMetricsInserter,
- STAGES::MetricEndpointInserter,
- STAGES::VariableEndpointInserter,
- STAGES::PanelIdsInserter
- ].freeze
-
- class << self
- def valid_params?(params)
- matching_dashboard?(params[:dashboard_path]) || self_monitoring_project?(params)
- end
-
- def all_dashboard_paths(_project)
- [{
- path: DASHBOARD_PATH,
- display_name: _(DASHBOARD_NAME),
- default: true,
- system_dashboard: true,
- out_of_the_box_dashboard: out_of_the_box_dashboard?
- }]
- end
-
- def self_monitoring_project?(params)
- params[:dashboard_path].nil? && params[:environment]&.project&.self_monitoring?
- end
- end
-
- private
-
- def dashboard_version
- DASHBOARD_VERSION
- end
- end
- end
-end
diff --git a/app/services/metrics/global_metrics_update_service.rb b/app/services/metrics/global_metrics_update_service.rb
new file mode 100644
index 00000000000..356de58ba2e
--- /dev/null
+++ b/app/services/metrics/global_metrics_update_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Metrics
+ # Updates instance-wide GitLab metrics.
+ #
+ # Anything that is not specific to a machine, process, request or any other context
+ # can be updated from this service.
+ #
+ # Examples of metrics that qualify:
+ # * Global counters (instance users, instance projects...)
+ # * State of settings stored in the database (whether a feature is active or not, tuning values...)
+ #
+ class GlobalMetricsUpdateService
+ def execute
+ return unless ::Gitlab::Metrics.prometheus_metrics_enabled?
+
+ maintenance_mode_metric.set({}, (::Gitlab.maintenance_mode? ? 1 : 0))
+ end
+
+ def maintenance_mode_metric
+ ::Gitlab::Metrics.gauge(:gitlab_maintenance_mode, 'Is GitLab Maintenance Mode enabled?')
+ end
+ end
+end
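
The new service above publishes instance-wide state as a Prometheus gauge. A minimal usage sketch, assuming the names from the new file (periodic scheduling, e.g. from a cron worker, is an assumption and not part of this diff):

    # Refresh the instance-wide gauges; a no-op when Prometheus metrics are disabled.
    Metrics::GlobalMetricsUpdateService.new.execute

    # The gauge is then exposed on the metrics endpoint roughly as:
    #   gitlab_maintenance_mode 1
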
diff --git a/app/services/metrics_service.rb b/app/services/metrics_service.rb
index d27328f89cd..f39cc1a8534 100644
--- a/app/services/metrics_service.rb
+++ b/app/services/metrics_service.rb
@@ -4,7 +4,11 @@ require 'prometheus/client/formats/text'
class MetricsService
def prometheus_metrics_text
- ::Prometheus::Client::Formats::Text.marshal_multiprocess(multiprocess_metrics_path)
+ if Feature.enabled?(:prom_metrics_rust)
+ ::Prometheus::Client::Formats::Text.marshal_multiprocess(multiprocess_metrics_path, use_rust: true)
+ else
+ ::Prometheus::Client::Formats::Text.marshal_multiprocess(multiprocess_metrics_path)
+ end
end
def metrics_text
diff --git a/app/services/ml/experiment_tracking/candidate_repository.rb b/app/services/ml/experiment_tracking/candidate_repository.rb
index f1fd93d7816..2399da3e182 100644
--- a/app/services/ml/experiment_tracking/candidate_repository.rb
+++ b/app/services/ml/experiment_tracking/candidate_repository.rb
@@ -10,14 +10,15 @@ module Ml
@user = user
end
- def by_iid(iid)
- ::Ml::Candidate.with_project_id_and_iid(project.id, iid)
+ def by_eid(eid)
+ ::Ml::Candidate.with_project_id_and_eid(project.id, eid)
end
def create!(experiment, start_time, tags = nil, name = nil)
candidate = experiment.candidates.create!(
user: user,
name: candidate_name(name, tags),
+ project: project,
start_time: start_time || 0
)
@@ -47,6 +48,8 @@ module Ml
end
def add_tag!(candidate, name, value)
+ handle_gitlab_tags(candidate, [{ key: name, value: value }])
+
candidate.metadata.create!(name: name, value: value)
end
@@ -60,11 +63,23 @@ module Ml
end
def add_tags(candidate, tag_definitions)
+ return unless tag_definitions.present?
+
+ handle_gitlab_tags(candidate, tag_definitions)
+
insert_many(candidate, tag_definitions, ::Ml::CandidateMetadata)
end
private
+ def handle_gitlab_tags(candidate, tag_definitions)
+ return unless tag_definitions.any? { |t| t[:key]&.starts_with?('gitlab.') }
+
+ Ml::ExperimentTracking::HandleCandidateGitlabMetadataService
+ .new(candidate, tag_definitions)
+ .execute
+ end
+
def timestamps
current_time = Time.zone.now
diff --git a/app/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service.rb b/app/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service.rb
new file mode 100644
index 00000000000..918e4d10ac3
--- /dev/null
+++ b/app/services/ml/experiment_tracking/handle_candidate_gitlab_metadata_service.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Ml
+ module ExperimentTracking
+ class HandleCandidateGitlabMetadataService
+ def initialize(candidate, metadata)
+ @candidate = candidate
+ @metadata = metadata.index_by { |m| m[:key] }
+ end
+
+ def execute
+ handle_build_metadata(@metadata['gitlab.CI_JOB_ID'])
+
+ @candidate.save
+ end
+
+ private
+
+ def handle_build_metadata(build_metadata)
+ return unless build_metadata
+
+ build = Ci::Build.find_by_id(build_metadata[:value])
+
+ raise ArgumentError, 'gitlab.CI_JOB_ID must refer to an existing build' unless build
+
+ @candidate.ci_build = build
+ end
+ end
+ end
+end
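
The service above links an ML candidate to the CI build referenced by a gitlab.CI_JOB_ID tag. A minimal sketch of how it is invoked, using the names from this diff (the tag payload and job id are hypothetical examples):

    tags = [{ key: 'gitlab.CI_JOB_ID', value: '42' }]
    Ml::ExperimentTracking::HandleCandidateGitlabMetadataService
      .new(candidate, tags)
      .execute
    # Raises ArgumentError if build 42 does not exist; otherwise sets candidate.ci_build.
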
diff --git a/app/services/notes/build_service.rb b/app/services/notes/build_service.rb
index e6766273441..91993700e25 100644
--- a/app/services/notes/build_service.rb
+++ b/app/services/notes/build_service.rb
@@ -4,8 +4,15 @@ module Notes
class BuildService < ::BaseService
def execute
in_reply_to_discussion_id = params.delete(:in_reply_to_discussion_id)
+ external_author = params.delete(:external_author)
+
discussion = nil
+ if external_author.present?
+ note_metadata = Notes::NoteMetadata.new(email_participant: external_author)
+ params[:note_metadata] = note_metadata
+ end
+
if in_reply_to_discussion_id.present?
discussion = find_discussion(in_reply_to_discussion_id)
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index f5efc480fef..7dd6cd9a87c 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -4,7 +4,7 @@ module Notes
class CreateService < ::Notes::BaseService
include IncidentManagement::UsageData
- def execute(skip_capture_diff_note_position: false, skip_merge_status_trigger: false)
+ def execute(skip_capture_diff_note_position: false, skip_merge_status_trigger: false, skip_set_reviewed: false)
note = Notes::BuildService.new(project, current_user, params.except(:merge_request_diff_head_sha)).execute
# n+1: https://gitlab.com/gitlab-org/gitlab-foss/issues/37440
@@ -38,7 +38,8 @@ module Notes
when_saved(
note,
skip_capture_diff_note_position: skip_capture_diff_note_position,
- skip_merge_status_trigger: skip_merge_status_trigger
+ skip_merge_status_trigger: skip_merge_status_trigger,
+ skip_set_reviewed: skip_set_reviewed
)
end
end
@@ -54,6 +55,7 @@ module Notes
content, update_params, message, command_names = quick_actions_service.execute(note, quick_action_options)
only_commands = content.empty?
note.note = content
+ note.command_names = command_names
yield(only_commands)
@@ -78,7 +80,9 @@ module Notes
end
end
- def when_saved(note, skip_capture_diff_note_position: false, skip_merge_status_trigger: false)
+ def when_saved(
+ note, skip_capture_diff_note_position: false, skip_merge_status_trigger: false,
+ skip_set_reviewed: false)
todo_service.new_note(note, current_user)
clear_noteable_diffs_cache(note)
Suggestions::CreateService.new(note).execute
@@ -86,6 +90,8 @@ module Notes
track_event(note, current_user)
if note.for_merge_request? && note.start_of_discussion?
+ set_reviewed(note) unless skip_set_reviewed
+
if !skip_capture_diff_note_position && note.diff_note?
Discussions::CaptureDiffNotePositionService.new(note.noteable, note.diff_file&.paths).execute(note.discussion)
end
@@ -161,24 +167,19 @@ module Notes
track_note_creation_usage_for_merge_requests(note) if note.for_merge_request?
track_incident_action(user, note.noteable, 'incident_comment') if note.for_issue?
track_note_creation_in_ipynb(note)
+ track_note_creation_visual_review(note)
- if Feature.enabled?(:notes_create_service_tracking, project)
- Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
- end
+ metric_key_path = 'counts.commit_comment'
- if Feature.enabled?(:route_hll_to_snowplow_phase4, project&.namespace) && note.for_commit?
- metric_key_path = 'counts.commit_comment'
-
- Gitlab::Tracking.event(
- 'Notes::CreateService',
- 'create_commit_comment',
- project: project,
- namespace: project&.namespace,
- user: user,
- label: metric_key_path,
- context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: metric_key_path).to_context]
- )
- end
+ Gitlab::Tracking.event(
+ 'Notes::CreateService',
+ 'create_commit_comment',
+ project: project,
+ namespace: project&.namespace,
+ user: user,
+ label: metric_key_path,
+ context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: metric_key_path).to_context]
+ )
end
def tracking_data_for(note)
@@ -191,8 +192,10 @@ module Notes
end
def track_note_creation_usage_for_issues(note)
- Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_added_action(author: note.author,
- project: project)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_added_action(
+ author: note.author,
+ project: project
+ )
end
def track_note_creation_usage_for_merge_requests(note)
@@ -208,6 +211,15 @@ module Notes
Gitlab::UsageDataCounters::IpynbDiffActivityCounter.note_created(note)
end
+
+ def track_note_creation_visual_review(note)
+ Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
+ end
+
+ def set_reviewed(note)
+ ::MergeRequests::MarkReviewerReviewedService.new(project: project, current_user: current_user)
+ .execute(note.noteable)
+ end
end
end
diff --git a/app/services/notes/destroy_service.rb b/app/services/notes/destroy_service.rb
index ccee94a5cea..76ddd32a76b 100644
--- a/app/services/notes/destroy_service.rb
+++ b/app/services/notes/destroy_service.rb
@@ -16,8 +16,10 @@ module Notes
private
def track_note_removal_usage_for_issues(note)
- Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_removed_action(author: note.author,
- project: project)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_removed_action(
+ author: note.author,
+ project: project
+ )
end
def track_note_removal_usage_for_merge_requests(note)
diff --git a/app/services/notes/quick_actions_service.rb b/app/services/notes/quick_actions_service.rb
index 900ace24ab4..38f7a23ce29 100644
--- a/app/services/notes/quick_actions_service.rb
+++ b/app/services/notes/quick_actions_service.rb
@@ -13,23 +13,18 @@ module Notes
delegate :commands_executed_count, to: :interpret_service, allow_nil: true
- UPDATE_SERVICES = {
- 'Issue' => Issues::UpdateService,
- 'MergeRequest' => MergeRequests::UpdateService,
- 'Commit' => Commits::TagService
- }.freeze
- private_constant :UPDATE_SERVICES
-
- def self.update_services
- UPDATE_SERVICES
- end
+ SUPPORTED_NOTEABLES = %w[WorkItem Issue MergeRequest Commit].freeze
+
+ private_constant :SUPPORTED_NOTEABLES
- def self.noteable_update_service_class(note)
- update_services[note.noteable_type]
+ def self.supported_noteables
+ SUPPORTED_NOTEABLES
end
def self.supported?(note)
- !!noteable_update_service_class(note)
+ return true if note.for_work_item?
+
+ supported_noteables.include? note.noteable_type
end
def supported?(note)
@@ -55,21 +50,28 @@ module Notes
update_params[:spend_time][:note_id] = note.id
end
- noteable_update_service_class = self.class.noteable_update_service_class(note)
-
- # TODO: This conditional is necessary because we have not fully converted all possible
- # noteable_update_service_class classes to use named arguments. See more details
- # on the partial conversion at https://gitlab.com/gitlab-org/gitlab/-/merge_requests/59182
- # Follow-on issue to address this is here:
- # https://gitlab.com/gitlab-org/gitlab/-/issues/328734
- service =
- if noteable_update_service_class.respond_to?(:constructor_container_arg)
- noteable_update_service_class.new(**noteable_update_service_class.constructor_container_arg(note.resource_parent), current_user: current_user, params: update_params)
- else
- noteable_update_service_class.new(note.resource_parent, current_user, update_params)
- end
-
- service.execute(note.noteable)
+ noteable_update_service(note, update_params).execute(note.noteable)
+ end
+
+ def noteable_update_service(note, update_params)
+ if note.for_work_item?
+ parsed_params = note.noteable.transform_quick_action_params(update_params)
+
+ WorkItems::UpdateService.new(
+ container: note.resource_parent,
+ current_user: current_user,
+ params: parsed_params[:common],
+ widget_params: parsed_params[:widgets]
+ )
+ elsif note.for_issue?
+ Issues::UpdateService.new(container: note.resource_parent, current_user: current_user, params: update_params)
+ elsif note.for_merge_request?
+ MergeRequests::UpdateService.new(
+ project: note.resource_parent, current_user: current_user, params: update_params
+ )
+ elsif note.for_commit?
+ Commits::TagService.new(note.resource_parent, current_user, update_params)
+ end
end
end
end
diff --git a/app/services/notes/update_service.rb b/app/services/notes/update_service.rb
index 2dae76feb0b..e04891da7f8 100644
--- a/app/services/notes/update_service.rb
+++ b/app/services/notes/update_service.rb
@@ -86,8 +86,10 @@ module Notes
end
def track_note_edit_usage_for_issues(note)
- Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_edited_action(author: note.author,
- project: project)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_edited_action(
+ author: note.author,
+ project: project
+ )
end
def track_note_edit_usage_for_merge_requests(note)
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 47bc36fce70..b93b44ce797 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -492,6 +492,18 @@ class NotificationService
mailer.member_access_denied_email(member.real_source_type, member.source_id, member.user_id).deliver_later
end
+ def decline_invite(member)
+ # Must always send, regardless of project/namespace configuration since it's a
+ # response to the user's action.
+
+ mailer.member_invite_declined_email(
+ member.real_source_type,
+ member.source.id,
+ member.invite_email,
+ member.created_by_id
+ ).deliver_later
+ end
+
# Project invite
def invite_project_member(project_member, token)
return true unless project_member.notifiable?(:subscription)
@@ -505,18 +517,6 @@ class NotificationService
mailer.member_invite_accepted_email(project_member.real_source_type, project_member.id).deliver_later
end
- def decline_project_invite(project_member)
- # Must always send, regardless of project/namespace configuration since it's a
- # response to the user's action.
-
- mailer.member_invite_declined_email(
- project_member.real_source_type,
- project_member.project.id,
- project_member.invite_email,
- project_member.created_by_id
- ).deliver_later
- end
-
def new_project_member(project_member)
return true unless project_member.notifiable?(:mention, skip_read_ability: true)
@@ -542,18 +542,6 @@ class NotificationService
mailer.member_invite_accepted_email(group_member.real_source_type, group_member.id).deliver_later
end
- def decline_group_invite(group_member)
- # Must always send, regardless of project/namespace configuration since it's a
- # response to the user's action.
-
- mailer.member_invite_declined_email(
- group_member.real_source_type,
- group_member.group.id,
- group_member.invite_email,
- group_member.created_by_id
- ).deliver_later
- end
-
def new_group_member(group_member)
return true unless group_member.notifiable?(:mention)
@@ -810,6 +798,10 @@ class NotificationService
end
end
+ def new_achievement_email(user, achievement)
+ mailer.new_achievement_email(user, achievement)
+ end
+
protected
def new_resource_email(target, current_user, method)
diff --git a/app/services/packages/conan/search_service.rb b/app/services/packages/conan/search_service.rb
index df22a895c00..c65c9a85da8 100644
--- a/app/services/packages/conan/search_service.rb
+++ b/app/services/packages/conan/search_service.rb
@@ -8,10 +8,6 @@ module Packages
WILDCARD = '*'
RECIPE_SEPARATOR = '@'
- def initialize(user, params)
- super(nil, user, params)
- end
-
def execute
ServiceResponse.success(payload: { results: search_results })
end
@@ -23,35 +19,34 @@ module Packages
return search_for_single_package(sanitized_query) if params[:query].include?(RECIPE_SEPARATOR)
- search_packages(build_query)
+ search_packages
end
def wildcard_query?
params[:query] == WILDCARD
end
- def build_query
- return "#{sanitized_query}%" if params[:query].end_with?(WILDCARD)
-
- sanitized_query
- end
-
- def search_packages(query)
- ::Packages::Conan::PackageFinder.new(current_user, query: query).execute.map(&:conan_recipe)
+ def sanitized_query
+ @sanitized_query ||= sanitize_sql_like(params[:query].delete(WILDCARD))
end
def search_for_single_package(query)
- name, version, username, _ = query.split(%r{[@/]})
- full_path = Packages::Conan::Metadatum.full_path_from(package_username: username)
- project = Project.find_by_full_path(full_path)
- return unless Ability.allowed?(current_user, :read_package, project&.packages_policy_subject)
+ ::Packages::Conan::SinglePackageSearchService
+ .new(query, current_user)
+ .execute[:results]
+ end
- result = project.packages.with_name(name).with_version(version).order_created.last
- [result&.conan_recipe].compact
+ def search_packages
+ ::Packages::Conan::PackageFinder
+ .new(current_user, { query: build_query }, project: project)
+ .execute
+ .map(&:conan_recipe)
end
- def sanitized_query
- @sanitized_query ||= sanitize_sql_like(params[:query].delete(WILDCARD))
+ def build_query
+ return "#{sanitized_query}%" if params[:query].end_with?(WILDCARD)
+
+ sanitized_query
end
end
end
diff --git a/app/services/packages/conan/single_package_search_service.rb b/app/services/packages/conan/single_package_search_service.rb
new file mode 100644
index 00000000000..e133b35c2cf
--- /dev/null
+++ b/app/services/packages/conan/single_package_search_service.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module Packages
+ module Conan
+ class SinglePackageSearchService # rubocop:disable Search/NamespacedClass
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(query, current_user)
+ @name, @version, @username, _ = query.split(%r{[@/]})
+ @current_user = current_user
+ end
+
+ def execute
+ ServiceResponse.success(payload: { results: search_results })
+ end
+
+ private
+
+ attr_reader :name, :version, :username, :current_user
+
+ def search_results
+ return [] unless can_access_project_package?
+
+ [package&.conan_recipe].compact
+ end
+
+ def package
+ project
+ .packages
+ .with_name(name)
+ .with_version(version)
+ .order_created
+ .last
+ end
+
+ def project
+ Project.find_by_full_path(full_path)
+ end
+ strong_memoize_attr :project
+
+ def full_path
+ ::Packages::Conan::Metadatum.full_path_from(package_username: username)
+ end
+
+ def can_access_project_package?
+ Ability.allowed?(current_user, :read_package, project.try(:packages_policy_subject))
+ end
+ end
+ end
+end
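
The new service above resolves a single Conan recipe from a name/version@username/channel query, mirroring the logic previously inlined in SearchService. A minimal sketch, assuming the names from this diff (the query string is a hypothetical example):

    query = 'hello/1.2.3@my-group+my-project/stable'
    results = Packages::Conan::SinglePackageSearchService
      .new(query, current_user)
      .execute[:results]
    # => ['hello/1.2.3@my-group+my-project/stable'] when the package exists and is readable
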
diff --git a/app/services/packages/create_event_service.rb b/app/services/packages/create_event_service.rb
index 82c4292fca8..8eac30f0022 100644
--- a/app/services/packages/create_event_service.rb
+++ b/app/services/packages/create_event_service.rb
@@ -10,15 +10,6 @@ module Packages
::Packages::Event.counters_for(event_scope, event_name, originator_type).each do |event_name|
::Gitlab::UsageDataCounters::PackageEventCounter.count(event_name)
end
-
- if Feature.enabled?(:collect_package_events) && Gitlab::Database.read_write?
- ::Packages::Event.create!(
- event_type: event_name,
- originator: current_user&.id,
- originator_type: originator_type,
- event_scope: event_scope
- )
- end
end
def originator_type
diff --git a/app/services/packages/debian/extract_metadata_service.rb b/app/services/packages/debian/extract_metadata_service.rb
index eb8227d1296..cc9defd2e73 100644
--- a/app/services/packages/debian/extract_metadata_service.rb
+++ b/app/services/packages/debian/extract_metadata_service.rb
@@ -14,6 +14,10 @@ module Packages
def execute
raise ExtractionError, 'invalid package file' unless valid_package_file?
+ if file_type == :unsupported
+ raise ExtractionError, "unsupported file extension for file #{package_file.file_name}"
+ end
+
extract_metadata
end
@@ -28,7 +32,7 @@ module Packages
end
def file_type_basic
- %i[dsc deb udeb buildinfo changes].each do |format|
+ %i[dsc deb udeb buildinfo changes ddeb].each do |format|
return format if package_file.file_name.end_with?(".#{format}")
end
@@ -36,8 +40,8 @@ module Packages
end
def file_type_source
- # https://manpages.debian.org/buster/dpkg-dev/dpkg-source.1.en.html
- %i[gzip bzip2 lzma xz].each do |format|
+ # https://manpages.debian.org/buster/dpkg-dev/dpkg-source.1.en.html#Format:_3.0_(quilt)
+ %i[gz bz2 lzma xz].each do |format|
return :source if package_file.file_name.end_with?(".tar.#{format}")
end
@@ -45,13 +49,12 @@ module Packages
end
def file_type
- strong_memoize(:file_type) do
- file_type_basic || file_type_source || :unknown
- end
+ file_type_basic || file_type_source || :unsupported
end
+ strong_memoize_attr :file_type
def file_type_debian?
- file_type == :deb || file_type == :udeb
+ file_type == :deb || file_type == :udeb || file_type == :ddeb
end
def file_type_meta?
@@ -59,18 +62,17 @@ module Packages
end
def fields
- strong_memoize(:fields) do
- if file_type_debian?
- package_file.file.use_open_file(unlink_early: false) do |file|
- ::Packages::Debian::ExtractDebMetadataService.new(file.file_path).execute
- end
- elsif file_type_meta?
- package_file.file.use_open_file do |file|
- ::Packages::Debian::ParseDebian822Service.new(file.read).execute.each_value.first
- end
+ if file_type_debian?
+ package_file.file.use_open_file(unlink_early: false) do |file|
+ ::Packages::Debian::ExtractDebMetadataService.new(file.file_path).execute
+ end
+ elsif file_type_meta?
+ package_file.file.use_open_file do |file|
+ ::Packages::Debian::ParseDebian822Service.new(file.read).execute.each_value.first
end
end
end
+ strong_memoize_attr :fields
def extract_metadata
architecture = fields['Architecture'] if file_type_debian?
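
With the changes above, .ddeb files are treated as binary Debian packages and an unrecognised extension now fails fast instead of falling through as :unknown. A minimal sketch, assuming the service is constructed with a package file as in the rest of this class (the file names are hypothetical examples):

    service = Packages::Debian::ExtractMetadataService.new(package_file)
    # For 'libfoo-dbgsym_1.0_amd64.ddeb' the metadata is extracted like a .deb;
    # for 'foo.tar.zst' an ExtractionError ('unsupported file extension ...') is raised.
    service.execute
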
diff --git a/app/services/packages/debian/find_or_create_incoming_service.rb b/app/services/packages/debian/find_or_create_incoming_service.rb
index 2d29ba5f3c3..fae87f09d41 100644
--- a/app/services/packages/debian/find_or_create_incoming_service.rb
+++ b/app/services/packages/debian/find_or_create_incoming_service.rb
@@ -4,7 +4,7 @@ module Packages
module Debian
class FindOrCreateIncomingService < ::Packages::CreatePackageService
def execute
- find_or_create_package!(:debian, name: 'incoming', version: nil)
+ find_or_create_package!(:debian, name: ::Packages::Debian::INCOMING_PACKAGE_NAME, version: nil)
end
end
end
diff --git a/app/services/packages/debian/find_or_create_package_service.rb b/app/services/packages/debian/find_or_create_package_service.rb
index cb765e956e7..a9481504d2b 100644
--- a/app/services/packages/debian/find_or_create_package_service.rb
+++ b/app/services/packages/debian/find_or_create_package_service.rb
@@ -6,13 +6,19 @@ module Packages
include Gitlab::Utils::StrongMemoize
def execute
- package = project.packages
- .debian
- .with_name(params[:name])
- .with_version(params[:version])
- .with_debian_codename_or_suite(params[:distribution_name])
- .not_pending_destruction
- .first
+ packages = project.packages
+ .existing_debian_packages_with(name: params[:name], version: params[:version])
+
+ package = packages.with_debian_codename_or_suite(params[:distribution_name]).first
+
+ unless package
+ package_in_other_distribution = packages.first
+
+ if package_in_other_distribution
+ raise ArgumentError, "Debian package #{params[:name]} #{params[:version]} exists " \
+ "in distribution #{package_in_other_distribution.debian_distribution.codename}"
+ end
+ end
package ||= create_package!(
:debian,
@@ -25,13 +31,12 @@ module Packages
private
def distribution
- strong_memoize(:distribution) do
- Packages::Debian::DistributionsFinder.new(
- project,
- codename_or_suite: params[:distribution_name]
- ).execute.last!
- end
+ Packages::Debian::DistributionsFinder.new(
+ project,
+ codename_or_suite: params[:distribution_name]
+ ).execute.last!
end
+ strong_memoize_attr :distribution
end
end
end
diff --git a/app/services/packages/debian/generate_distribution_service.rb b/app/services/packages/debian/generate_distribution_service.rb
index 12ae6c68918..d69f6eb1511 100644
--- a/app/services/packages/debian/generate_distribution_service.rb
+++ b/app/services/packages/debian/generate_distribution_service.rb
@@ -163,26 +163,38 @@ module Packages
end
def reuse_or_create_component_file(component, component_file_type, architecture, content)
- file_md5 = Digest::MD5.hexdigest(content)
file_sha256 = Digest::SHA256.hexdigest(content)
- component_file = component.files
- .with_file_type(component_file_type)
- .with_architecture(architecture)
- .with_compression_type(nil)
- .with_file_sha256(file_sha256)
- .last
-
- if component_file
+ component_files = component.files
+ .with_file_type(component_file_type)
+ .with_architecture(architecture)
+ .with_compression_type(nil)
+ .order_updated_asc
+ component_file = component_files.with_file_sha256(file_sha256).last
+ last_component_file = component_files.last
+
+ if content.empty? && (!last_component_file || last_component_file.file_sha256 == file_sha256)
+ # Do not create an empty component file for empty content when there is no
+ # last component file or when the last component file is empty too
+ component_file = last_component_file || component.files.build(
+ updated_at: release_date,
+ file_type: component_file_type,
+ architecture: architecture,
+ compression_type: nil,
+ size: 0
+ )
+ elsif component_file
+ # Reuse existing component file
component_file.touch(time: release_date)
else
+ # Create a new component file
component_file = component.files.create!(
updated_at: release_date,
file_type: component_file_type,
architecture: architecture,
compression_type: nil,
file: CarrierWaveStringFile.new(content),
- file_md5: file_md5,
- file_sha256: file_sha256
+ file_sha256: file_sha256,
+ size: content.bytesize
)
end
@@ -255,7 +267,7 @@ module Packages
# used by ExclusiveLeaseGuard
def lease_key
- "packages:debian:generate_distribution_service:distribution:#{@distribution.id}"
+ "packages:debian:generate_distribution_service:#{@distribution.class.container_type}_distribution:#{@distribution.id}"
end
# used by ExclusiveLeaseGuard
diff --git a/app/services/packages/debian/process_package_file_service.rb b/app/services/packages/debian/process_package_file_service.rb
index 7d2d71184e6..f4fcd3a563c 100644
--- a/app/services/packages/debian/process_package_file_service.rb
+++ b/app/services/packages/debian/process_package_file_service.rb
@@ -6,7 +6,7 @@ module Packages
include ExclusiveLeaseGuard
include Gitlab::Utils::StrongMemoize
- SOURCE_FIELD_SPLIT_REGEX = /[ ()]/.freeze
+ SOURCE_FIELD_SPLIT_REGEX = /[ ()]/
# used by ExclusiveLeaseGuard
DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
@@ -41,7 +41,9 @@ module Packages
raise ArgumentError, 'package file without Debian metadata' unless @package_file.debian_file_metadatum
raise ArgumentError, 'already processed package file' unless @package_file.debian_file_metadatum.unknown?
- return if file_metadata[:file_type] == :deb || file_metadata[:file_type] == :udeb
+ if file_metadata[:file_type] == :deb || file_metadata[:file_type] == :udeb || file_metadata[:file_type] == :ddeb
+ return
+ end
raise ArgumentError, "invalid package file type: #{file_metadata[:file_type]}"
end
@@ -52,14 +54,21 @@ module Packages
strong_memoize_attr :file_metadata
def package
- package = temp_package.project
- .packages
- .debian
- .with_name(package_name)
- .with_version(package_version)
- .with_debian_codename_or_suite(@distribution_name)
- .not_pending_destruction
- .last
+ packages = temp_package.project
+ .packages
+ .existing_debian_packages_with(name: package_name, version: package_version)
+ package = packages.with_debian_codename_or_suite(@distribution_name)
+ .first
+
+ unless package
+ package_in_other_distribution = packages.first
+
+ if package_in_other_distribution
+ raise ArgumentError, "Debian package #{package_name} #{package_version} exists " \
+ "in distribution #{package_in_other_distribution.debian_distribution.codename}"
+ end
+ end
+
package || temp_package
end
strong_memoize_attr :package
diff --git a/app/services/packages/generic/create_package_file_service.rb b/app/services/packages/generic/create_package_file_service.rb
index 78c97000654..09e3fb4a825 100644
--- a/app/services/packages/generic/create_package_file_service.rb
+++ b/app/services/packages/generic/create_package_file_service.rb
@@ -47,7 +47,11 @@ module Packages
end
def target_file_is_duplicate?(package)
- package.package_files.with_file_name(params[:file_name]).exists?
+ package
+ .package_files
+ .with_file_name(params[:file_name])
+ .not_pending_destruction
+ .exists?
end
end
end
diff --git a/app/services/packages/mark_package_for_destruction_service.rb b/app/services/packages/mark_package_for_destruction_service.rb
index 3417febe79a..8ccc242ae36 100644
--- a/app/services/packages/mark_package_for_destruction_service.rb
+++ b/app/services/packages/mark_package_for_destruction_service.rb
@@ -13,7 +13,8 @@ module Packages
package.sync_maven_metadata(current_user)
service_response_success('Package was successfully marked as pending destruction')
- rescue StandardError
+ rescue StandardError => e
+ track_exception(e)
service_response_error('Failed to mark the package as pending destruction', 400)
end
@@ -30,5 +31,13 @@ module Packages
def user_can_delete_package?
can?(current_user, :destroy_package, package.project)
end
+
+ def track_exception(error)
+ Gitlab::ErrorTracking.track_exception(
+ error,
+ project_id: package.project_id,
+ package_id: package.id
+ )
+ end
end
end
diff --git a/app/services/packages/mark_packages_for_destruction_service.rb b/app/services/packages/mark_packages_for_destruction_service.rb
index 023392cf2d9..ade9ad2c974 100644
--- a/app/services/packages/mark_packages_for_destruction_service.rb
+++ b/app/services/packages/mark_packages_for_destruction_service.rb
@@ -31,13 +31,15 @@ module Packages
def execute(batch_size: BATCH_SIZE)
no_access = false
min_batch_size = [batch_size, BATCH_SIZE].min
+ package_ids = []
@packages.each_batch(of: min_batch_size) do |batched_packages|
loaded_packages = batched_packages.including_project_route.to_a
+ package_ids = loaded_packages.map(&:id)
break no_access = true unless can_destroy_packages?(loaded_packages)
- ::Packages::Package.id_in(loaded_packages.map(&:id))
+ ::Packages::Package.id_in(package_ids)
.update_all(status: :pending_destruction)
sync_maven_metadata(loaded_packages)
@@ -47,7 +49,8 @@ module Packages
return UNAUTHORIZED_RESPONSE if no_access
SUCCESS_RESPONSE
- rescue StandardError
+ rescue StandardError => e
+ track_exception(e, package_ids)
ERROR_RESPONSE
end
@@ -75,5 +78,9 @@ module Packages
can?(@current_user, :destroy_package, package)
end
end
+
+ def track_exception(error, package_ids)
+ Gitlab::ErrorTracking.track_exception(error, package_ids: package_ids)
+ end
end
end
diff --git a/app/services/packages/maven/find_or_create_package_service.rb b/app/services/packages/maven/find_or_create_package_service.rb
index b29adf4e11a..ac0c77391d7 100644
--- a/app/services/packages/maven/find_or_create_package_service.rb
+++ b/app/services/packages/maven/find_or_create_package_service.rb
@@ -3,10 +3,13 @@ module Packages
module Maven
class FindOrCreatePackageService < BaseService
SNAPSHOT_TERM = '-SNAPSHOT'
+ MAX_FILE_NAME_LENGTH = 5000
def execute
+ return ServiceResponse.error(message: 'File name is too long') if file_name_too_long?
+
package =
- ::Packages::Maven::PackageFinder.new(current_user, project, path: params[:path])
+ ::Packages::Maven::PackageFinder.new(current_user, project, path: path)
.execute
unless Namespace::PackageSetting.duplicates_allowed?(package)
@@ -32,16 +35,16 @@ module Packages
# - my-company/my-app/maven-metadata.xml
#
# The first upload has to create the proper package (the one with the version set).
- if params[:file_name] == Packages::Maven::Metadata.filename && !params[:path]&.ends_with?(SNAPSHOT_TERM)
- package_name = params[:path]
+ if file_name == Packages::Maven::Metadata.filename && !snapshot_version?
+ package_name = path
version = nil
else
- package_name, _, version = params[:path].rpartition('/')
+ package_name, _, version = path.rpartition('/')
end
package_params = {
name: package_name,
- path: params[:path],
+ path: path,
status: params[:status],
version: version
}
@@ -58,21 +61,55 @@ module Packages
private
- def extname(filename)
- return if filename.blank?
+ def file_name_too_long?
+ return false unless file_name
- File.extname(filename)
+ file_name.size > MAX_FILE_NAME_LENGTH
end
def target_package_is_duplicate?(package)
# duplicate metadata files can be uploaded multiple times
return false if package.version.nil?
- package
- .package_files
- .map { |file| extname(file.file_name) }
- .compact
- .include?(extname(params[:file_name]))
+ existing_file_names = strip_snapshot_parts(
+ package.package_files
+ .map(&:file_name)
+ .compact
+ )
+
+ published_file_name = strip_snapshot_parts_from(file_name)
+ existing_file_names.include?(published_file_name)
+ end
+
+ def strip_snapshot_parts(file_names)
+ return file_names unless snapshot_version?
+
+ Array.wrap(file_names).map { |f| strip_snapshot_parts_from(f) }
+ end
+
+ def strip_snapshot_parts_from(file_name)
+ return file_name unless snapshot_version?
+ return unless file_name
+
+ match_data = file_name.match(Gitlab::Regex::Packages::MAVEN_SNAPSHOT_DYNAMIC_PARTS)
+
+ if match_data
+ file_name.gsub(match_data.captures.last, "")
+ else
+ file_name
+ end
+ end
+
+ def snapshot_version?
+ path&.ends_with?(SNAPSHOT_TERM)
+ end
+
+ def path
+ params[:path]
+ end
+
+ def file_name
+ params[:file_name]
end
end
end
diff --git a/app/services/packages/npm/create_metadata_cache_service.rb b/app/services/packages/npm/create_metadata_cache_service.rb
new file mode 100644
index 00000000000..1cc5f7f34e7
--- /dev/null
+++ b/app/services/packages/npm/create_metadata_cache_service.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Packages
+ module Npm
+ class CreateMetadataCacheService
+ include Gitlab::Utils::StrongMemoize
+ include ExclusiveLeaseGuard
+
+ # used by ExclusiveLeaseGuard
+ DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
+
+ def initialize(project, package_name, packages)
+ @project = project
+ @package_name = package_name
+ @packages = packages
+ end
+
+ def execute
+ try_obtain_lease do
+ Packages::Npm::MetadataCache
+ .find_or_build(package_name: package_name, project_id: project.id)
+ .update!(
+ file: CarrierWaveStringFile.new(metadata_content),
+ size: metadata_content.bytesize
+ )
+ end
+ end
+
+ private
+
+ attr_reader :package_name, :packages, :project
+
+ def metadata_content
+ metadata.payload.to_json
+ end
+ strong_memoize_attr :metadata_content
+
+ def metadata
+ Packages::Npm::GenerateMetadataService.new(package_name, packages).execute
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_key
+ "packages:npm:create_metadata_cache_service:metadata_caches:#{project.id}_#{package_name}"
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_timeout
+ DEFAULT_LEASE_TIMEOUT
+ end
+ end
+ end
+end
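
The service above rebuilds the cached npm metadata document under an exclusive lease, so concurrent publishes for the same package do not race. A minimal usage sketch with the names from this diff (loading the packages through the finder is an assumption):

    packages = ::Packages::Npm::PackageFinder.new(package_name, project: project).execute
    Packages::Npm::CreateMetadataCacheService.new(project, package_name, packages).execute
    # Returns without writing while another process holds the lease for the same
    # project/package pair (ExclusiveLeaseGuard behaviour).
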
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
index dd074f7472b..c71ae060dd9 100644
--- a/app/services/packages/npm/create_package_service.rb
+++ b/app/services/packages/npm/create_package_service.rb
@@ -3,15 +3,24 @@ module Packages
module Npm
class CreatePackageService < ::Packages::CreatePackageService
include Gitlab::Utils::StrongMemoize
+ include ExclusiveLeaseGuard
- PACKAGE_JSON_NOT_ALLOWED_FIELDS = %w[readme readmeFilename].freeze
+ PACKAGE_JSON_NOT_ALLOWED_FIELDS = %w[readme readmeFilename licenseText].freeze
+ DEFAULT_LEASE_TIMEOUT = 1.hour.to_i
def execute
return error('Version is empty.', 400) if version.blank?
+ return error('Attachment data is empty.', 400) if attachment['data'].blank?
return error('Package already exists.', 403) if current_package_exists?
return error('File is too large.', 400) if file_size_exceeded?
- ApplicationRecord.transaction { create_npm_package! }
+ package = try_obtain_lease do
+ ApplicationRecord.transaction { create_npm_package! }
+ end
+
+ return error('Could not obtain package lease.', 400) unless package
+
+ package
end
private
@@ -23,11 +32,21 @@ module Packages
::Packages::CreateDependencyService.new(package, package_dependencies).execute
::Packages::Npm::CreateTagService.new(package, dist_tag).execute
- package.create_npm_metadatum!(package_json: package_json)
+ create_npm_metadatum!(package)
package
end
+ def create_npm_metadatum!(package)
+ package.create_npm_metadatum!(package_json: package_json)
+ rescue ActiveRecord::RecordInvalid => e
+ if package.npm_metadatum && package.npm_metadatum.errors.added?(:package_json, 'structure is too large')
+ Gitlab::ErrorTracking.track_exception(e, field_sizes: field_sizes_for_error_tracking)
+ end
+
+ raise
+ end
+
def current_package_exists?
project.packages
.npm
@@ -103,6 +122,45 @@ module Packages
def file_size_exceeded?
project.actual_limits.exceeded?(:npm_max_file_size, calculated_package_file_size)
end
+
+ # used by ExclusiveLeaseGuard
+ def lease_key
+ "packages:npm:create_package_service:packages:#{project.id}_#{name}_#{version}"
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_timeout
+ DEFAULT_LEASE_TIMEOUT
+ end
+
+ def field_sizes
+ strong_memoize(:field_sizes) do
+ package_json.transform_values do |value|
+ value.to_s.size
+ end
+ end
+ end
+
+ def filtered_field_sizes
+ strong_memoize(:filtered_field_sizes) do
+ field_sizes.select do |_, size|
+ size >= ::Packages::Npm::Metadatum::MIN_PACKAGE_JSON_FIELD_SIZE_FOR_ERROR_TRACKING
+ end
+ end
+ end
+
+ def largest_fields
+ strong_memoize(:largest_fields) do
+ field_sizes
+ .sort_by { |a| a[1] }
+ .reverse[0..::Packages::Npm::Metadatum::NUM_FIELDS_FOR_ERROR_TRACKING - 1]
+ .to_h
+ end
+ end
+
+ def field_sizes_for_error_tracking
+ filtered_field_sizes.empty? ? largest_fields : filtered_field_sizes
+ end
end
end
end
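A hedged sketch of how the new lease surfaces to callers of the publish flow; the (project, user, params) constructor and the exact error shape follow the usual service conventions and are assumptions here:

    # Two overlapping publishes of the same name/version: the second caller
    # cannot take the exclusive lease and receives the new 400 error instead
    # of racing the transaction.
    result = Packages::Npm::CreatePackageService.new(project, user, params).execute

    if result.is_a?(::Packages::Package)
      result # the created npm package
    else
      result # e.g. a hash carrying 'Could not obtain package lease.' and http_status 400
    end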
diff --git a/app/services/packages/npm/deprecate_package_service.rb b/app/services/packages/npm/deprecate_package_service.rb
new file mode 100644
index 00000000000..2633e9f877c
--- /dev/null
+++ b/app/services/packages/npm/deprecate_package_service.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module Packages
+ module Npm
+ class DeprecatePackageService < BaseService
+ Deprecated = Struct.new(:package_id, :message)
+ BATCH_SIZE = 50
+
+ def initialize(project, params)
+ super(project, nil, params)
+ end
+
+ def execute(async: false)
+ return ::Packages::Npm::DeprecatePackageWorker.perform_async(project.id, filtered_params) if async
+
+ packages.select(:id, :version).each_batch(of: BATCH_SIZE) do |relation|
+ deprecated_metadatum = handle_batch(relation)
+ update_metadatum(deprecated_metadatum)
+ end
+ end
+
+ private
+
+ # To avoid passing the whole metadata to the worker
+ def filtered_params
+ {
+ package_name: params[:package_name],
+ versions: params[:versions].transform_values { |version| version.slice(:deprecated) }
+ }
+ end
+
+ def packages
+ ::Packages::Npm::PackageFinder
+ .new(params['package_name'], project: project, last_of_each_version: false)
+ .execute
+ end
+
+ def handle_batch(relation)
+ relation
+ .preload_npm_metadatum
+ .filter_map { |package| deprecate(package) }
+ end
+
+ def deprecate(package)
+ deprecation_message = params.dig('versions', package.version, 'deprecated')
+ return if deprecation_message.nil?
+
+ npm_metadatum = package.npm_metadatum
+ return if identical?(npm_metadatum.package_json['deprecated'], deprecation_message)
+
+ Deprecated.new(npm_metadatum.package_id, deprecation_message)
+ end
+
+ def identical?(package_json_deprecated, deprecation_message)
+ package_json_deprecated == deprecation_message ||
+ (package_json_deprecated.nil? && deprecation_message.empty?)
+ end
+
+ def update_metadatum(deprecated_metadatum)
+ return if deprecated_metadatum.empty?
+
+ deprecation_message = deprecated_metadatum.first.message
+
+ ::Packages::Npm::Metadatum
+ .package_id_in(deprecated_metadatum.map(&:package_id))
+ .update_all(update_clause(deprecation_message))
+ end
+
+ def update_clause(deprecation_message)
+ if deprecation_message.empty?
+ "package_json = package_json - 'deprecated'"
+ else
+ ["package_json = jsonb_set(package_json, '{deprecated}', ?)", deprecation_message.to_json]
+ end
+ end
+ end
+ end
+end
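A sketch of the params this service expects, mirroring the npm deprecate request body; the package name and message are placeholders, and indifferent access on the params hash is assumed since the service reads both string and symbol keys:

    params = {
      'package_name' => '@scope/my-package',
      'versions' => {
        '1.0.1' => { 'deprecated' => 'Contains a critical bug, please upgrade to 1.0.2.' }
      }
    }

    # Runs inline, batching packages 50 at a time; execute(async: true) enqueues
    # Packages::Npm::DeprecatePackageWorker with the trimmed params instead.
    Packages::Npm::DeprecatePackageService.new(project, params).execute

An empty deprecation message removes the flag, which is what the package_json - 'deprecated' update clause handles.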
diff --git a/app/services/packages/npm/generate_metadata_service.rb b/app/services/packages/npm/generate_metadata_service.rb
new file mode 100644
index 00000000000..800c3ce19b4
--- /dev/null
+++ b/app/services/packages/npm/generate_metadata_service.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+module Packages
+ module Npm
+ class GenerateMetadataService
+ include API::Helpers::RelatedResourcesHelpers
+
+      # Allowed fields are those of the abbreviated version object documented at
+      # https://github.com/npm/registry/blob/master/docs/responses/package-metadata.md#abbreviated-version-object
+      # except name, version, dist, dependencies and xDependencies, which are generated by this service.
+ PACKAGE_JSON_ALLOWED_FIELDS = %w[deprecated bin directories dist engines _hasShrinkwrap].freeze
+
+ def initialize(name, packages)
+ @name = name
+ @packages = packages
+ end
+
+ def execute(only_dist_tags: false)
+ ServiceResponse.success(payload: metadata(only_dist_tags))
+ end
+
+ private
+
+ attr_reader :name, :packages
+
+ def metadata(only_dist_tags)
+ result = { dist_tags: dist_tags }
+
+ unless only_dist_tags
+ result[:name] = name
+ result[:versions] = versions
+ end
+
+ result
+ end
+
+ def versions
+ package_versions = {}
+
+ packages.each_batch do |relation|
+ batched_packages = relation.including_dependency_links
+ .preload_files
+ .preload_npm_metadatum
+
+ batched_packages.each do |package|
+ package_file = package.installable_package_files.last
+
+ next unless package_file
+
+ package_versions[package.version] = build_package_version(package, package_file)
+ end
+ end
+
+ package_versions
+ end
+
+ def dist_tags
+ build_package_tags.tap { |t| t['latest'] ||= sorted_versions.last }
+ end
+
+ def build_package_tags
+ package_tags.to_h { |tag| [tag.name, tag.package.version] }
+ end
+
+ def build_package_version(package, package_file)
+ abbreviated_package_json(package).merge(
+ name: package.name,
+ version: package.version,
+ dist: {
+ shasum: package_file.file_sha1,
+ tarball: tarball_url(package, package_file)
+ }
+ ).tap do |package_version|
+ package_version.merge!(build_package_dependencies(package))
+ end
+ end
+
+ def tarball_url(package, package_file)
+ expose_url api_v4_projects_packages_npm_package_name___file_name_path(
+ { id: package.project_id, package_name: package.name, file_name: package_file.file_name }, true
+ )
+ end
+
+ def build_package_dependencies(package)
+ dependencies = Hash.new { |h, key| h[key] = {} }
+
+ package.dependency_links.each do |dependency_link|
+ dependency = dependency_link.dependency
+ dependencies[dependency_link.dependency_type][dependency.name] = dependency.version_pattern
+ end
+
+ dependencies
+ end
+
+ def sorted_versions
+ versions = packages.pluck_versions.compact
+ VersionSorter.sort(versions)
+ end
+
+ def package_tags
+ Packages::Tag.for_package_ids(packages.last_of_each_version_ids)
+ .preload_package
+ end
+
+ def abbreviated_package_json(package)
+ json = package.npm_metadatum&.package_json || {}
+ json.slice(*PACKAGE_JSON_ALLOWED_FIELDS)
+ end
+ end
+ end
+end
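A usage sketch for the extracted metadata generator; packages stands for a relation of installable npm packages sharing the given name:

    response = Packages::Npm::GenerateMetadataService
      .new('@scope/my-package', packages)
      .execute(only_dist_tags: false)

    response.payload
    # => { dist_tags: { 'latest' => '1.0.2', ... },
    #      name: '@scope/my-package',
    #      versions: { '1.0.2' => { ... }, ... } }

With only_dist_tags: true the payload is limited to dist_tags, which keeps the response small when the versions map is not needed.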
diff --git a/app/services/personal_access_tokens/create_service.rb b/app/services/personal_access_tokens/create_service.rb
index e2f2e220750..adb7924f35e 100644
--- a/app/services/personal_access_tokens/create_service.rb
+++ b/app/services/personal_access_tokens/create_service.rb
@@ -2,11 +2,12 @@
module PersonalAccessTokens
class CreateService < BaseService
- def initialize(current_user:, target_user:, params: {})
+ def initialize(current_user:, target_user:, params: {}, concatenate_errors: true)
@current_user = current_user
@target_user = target_user
@params = params.dup
@ip_address = @params.delete(:ip_address)
+ @concatenate_errors = concatenate_errors
end
def execute
@@ -19,7 +20,10 @@ module PersonalAccessTokens
notification_service.access_token_created(target_user, token.name)
ServiceResponse.success(payload: { personal_access_token: token })
else
- ServiceResponse.error(message: token.errors.full_messages.to_sentence, payload: { personal_access_token: token })
+ message = token.errors.full_messages
+ message = message.to_sentence if @concatenate_errors
+
+ ServiceResponse.error(message: message, payload: { personal_access_token: token })
end
end
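A sketch of the new concatenate_errors switch; the admin and user variables and the token params are placeholders, and the error text shown is illustrative:

    response = PersonalAccessTokens::CreateService.new(
      current_user: admin,
      target_user: user,
      params: { name: 'automation token', scopes: [:api] },
      concatenate_errors: false
    ).execute

    # On failure, response.message is now an Array of full messages instead of
    # a single sentence, e.g. ['Scopes can only contain available scopes'].
    response.error? && response.message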
diff --git a/app/services/personal_access_tokens/rotate_service.rb b/app/services/personal_access_tokens/rotate_service.rb
new file mode 100644
index 00000000000..64b0c5c98a9
--- /dev/null
+++ b/app/services/personal_access_tokens/rotate_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module PersonalAccessTokens
+ class RotateService
+ EXPIRATION_PERIOD = 1.week
+
+ def initialize(current_user, token)
+ @current_user = current_user
+ @token = token
+ end
+
+ def execute
+ return ServiceResponse.error(message: _('token already revoked')) if token.revoked?
+
+ response = ServiceResponse.success
+
+ PersonalAccessToken.transaction do
+ unless token.revoke!
+ response = ServiceResponse.error(message: _('failed to revoke token'))
+ raise ActiveRecord::Rollback
+ end
+
+ target_user = token.user
+ new_token = target_user.personal_access_tokens.create(create_token_params(token))
+
+ if new_token.persisted?
+ response = ServiceResponse.success(payload: { personal_access_token: new_token })
+ else
+ response = ServiceResponse.error(message: new_token.errors.full_messages.to_sentence)
+
+ raise ActiveRecord::Rollback
+ end
+ end
+
+ response
+ end
+
+ private
+
+ attr_reader :current_user, :token
+
+ def create_token_params(token)
+ { name: token.name,
+ impersonation: token.impersonation,
+ scopes: token.scopes,
+ expires_at: Date.today + EXPIRATION_PERIOD }
+ end
+ end
+end
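A sketch of rotating a token with the new service; the revocation and the replacement happen in one transaction, so a failed replacement also rolls back the revocation:

    response = PersonalAccessTokens::RotateService.new(current_user, token).execute

    if response.success?
      new_token = response.payload[:personal_access_token]
      new_token.expires_at # => one week from today (EXPIRATION_PERIOD)
    else
      response.message     # => 'token already revoked', 'failed to revoke token', or validation errors
    end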
diff --git a/app/services/preview_markdown_service.rb b/app/services/preview_markdown_service.rb
index b3a9beabba5..c8ccbe1465e 100644
--- a/app/services/preview_markdown_service.rb
+++ b/app/services/preview_markdown_service.rb
@@ -24,7 +24,7 @@ class PreviewMarkdownService < BaseService
return text, [] unless quick_action_types.include?(target_type)
quick_actions_service = QuickActions::InterpretService.new(project, current_user)
- quick_actions_service.explain(text, find_commands_target)
+ quick_actions_service.explain(text, find_commands_target, keep_actions: params[:render_quick_actions])
end
def find_user_references(text)
diff --git a/app/services/projects/android_target_platform_detector_service.rb b/app/services/projects/android_target_platform_detector_service.rb
deleted file mode 100644
index 11635ad18d5..00000000000
--- a/app/services/projects/android_target_platform_detector_service.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- # Service class to detect if a project is made to run on the Android platform.
- #
- # This service searches for an AndroidManifest.xml file which all Android app
- # project must have. It returns the symbol :android if the given project is an
- # Android app project.
- #
- # Ref: https://developer.android.com/guide/topics/manifest/manifest-intro
- #
- # Example usage:
- # > AndroidTargetPlatformDetectorService.new(a_project).execute
- # => nil
- # > AndroidTargetPlatformDetectorService.new(an_android_project).execute
- # => :android
- class AndroidTargetPlatformDetectorService < BaseService
- # <manifest> element is required and must occur once inside AndroidManifest.xml
- MANIFEST_FILE_SEARCH_QUERY = '<manifest filename:AndroidManifest.xml'
-
- def execute
- detect
- end
-
- private
-
- def file_finder
- @file_finder ||= ::Gitlab::FileFinder.new(project, project.default_branch)
- end
-
- def detect
- return :android if file_finder.find(MANIFEST_FILE_SEARCH_QUERY).present?
- end
- end
-end
diff --git a/app/services/projects/batch_open_merge_requests_count_service.rb b/app/services/projects/batch_open_merge_requests_count_service.rb
new file mode 100644
index 00000000000..62d1b018a55
--- /dev/null
+++ b/app/services/projects/batch_open_merge_requests_count_service.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+# Service class for getting and caching the number of open merge requests of several projects
+# Warning: do not use this service with a really large set of projects
+# because the service uses maps to retrieve the project ids
+module Projects
+ class BatchOpenMergeRequestsCountService < Projects::BatchCountService
+ # rubocop: disable CodeReuse/ActiveRecord
+ def global_count
+ @global_count ||= count_service.query(project_ids).group(:project_id).count
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def count_service
+ ::Projects::OpenMergeRequestsCountService
+ end
+ end
+end
diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb
deleted file mode 100644
index 58e146e5a32..00000000000
--- a/app/services/projects/blame_service.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-# Service class to correctly initialize Gitlab::Blame and Kaminari pagination
-# objects
-module Projects
- class BlameService
- PER_PAGE = 1000
-
- def initialize(blob, commit, params)
- @blob = blob
- @commit = commit
- @page = extract_page(params)
- @pagination_enabled = pagination_state(params)
- end
-
- attr_reader :page
-
- def blame
- Gitlab::Blame.new(blob, commit, range: blame_range)
- end
-
- def pagination
- return unless pagination_enabled
-
- Kaminari.paginate_array([], total_count: blob_lines_count, limit: per_page)
- .tap { |pagination| pagination.max_paginates_per(per_page) }
- .page(page)
- end
-
- def per_page
- PER_PAGE
- end
-
- private
-
- attr_reader :blob, :commit, :pagination_enabled
-
- def blame_range
- return unless pagination_enabled
-
- first_line = (page - 1) * per_page + 1
- last_line = (first_line + per_page).to_i - 1
-
- first_line..last_line
- end
-
- def extract_page(params)
- page = params.fetch(:page, 1).to_i
-
- return 1 if page < 1 || overlimit?(page)
-
- page
- end
-
- def pagination_state(params)
- return false if Gitlab::Utils.to_boolean(params[:no_pagination], default: false)
-
- Feature.enabled?(:blame_page_pagination, commit.project)
- end
-
- def overlimit?(page)
- page * per_page >= blob_lines_count + per_page
- end
-
- def blob_lines_count
- @blob_lines_count ||= blob.data.lines.count
- end
- end
-end
diff --git a/app/services/projects/container_repository/gitlab/cleanup_tags_service.rb b/app/services/projects/container_repository/gitlab/cleanup_tags_service.rb
index b69a3cc1a2c..714a9d43333 100644
--- a/app/services/projects/container_repository/gitlab/cleanup_tags_service.rb
+++ b/app/services/projects/container_repository/gitlab/cleanup_tags_service.rb
@@ -45,12 +45,12 @@ module Projects
end
def with_timeout
- result = {
+ result = success(
original_size: 0,
before_delete_size: 0,
deleted_size: 0,
deleted: []
- }
+ )
yield Time.zone.now, result
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 94cc4700a49..8ad2b0ac761 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -5,7 +5,7 @@ module Projects
include ValidatesClassificationLabel
ImportSourceDisabledError = Class.new(StandardError)
- INTERNAL_IMPORT_SOURCES = %w[bare_repository gitlab_custom_project_template gitlab_project_migration].freeze
+ INTERNAL_IMPORT_SOURCES = %w[gitlab_custom_project_template gitlab_project_migration].freeze
def initialize(user, params)
@current_user = user
@@ -58,6 +58,7 @@ module Projects
return @project if @project.errors.any?
validate_create_permissions
+ validate_import_permissions
return @project if @project.errors.any?
@relations_block&.call(@project)
@@ -98,6 +99,13 @@ module Projects
@project.errors.add(:namespace, "is not valid")
end
+ def validate_import_permissions
+ return unless @project.import?
+ return if current_user.can?(:import_projects, parent_namespace)
+
+ @project.errors.add(:user, 'is not allowed to import projects')
+ end
+
def after_create_actions
log_info("#{current_user.name} created a new project \"#{@project.full_name}\"")
@@ -144,8 +152,10 @@ module Projects
# completes), and any other affected users in the background
def setup_authorizations
if @project.group
- group_access_level = @project.group.max_member_access_for_user(current_user,
- only_concrete_membership: true)
+ group_access_level = @project.group.max_member_access_for_user(
+ current_user,
+ only_concrete_membership: true
+ )
if group_access_level > GroupMember::NO_ACCESS
current_user.project_authorizations.safe_find_or_create_by!(
@@ -187,7 +197,7 @@ module Projects
def create_readme
commit_attrs = {
- branch_name: @default_branch.presence || @project.default_branch_or_main,
+ branch_name: default_branch,
commit_message: 'Initial commit',
file_path: 'README.md',
file_content: readme_content
@@ -201,7 +211,11 @@ module Projects
end
def readme_content
- @readme_template.presence || ReadmeRendererService.new(@project, current_user).execute
+ readme_attrs = {
+ default_branch: default_branch
+ }
+
+ @readme_template.presence || ReadmeRendererService.new(@project, current_user, readme_attrs).execute
end
def skip_wiki?
@@ -217,8 +231,10 @@ module Projects
@project.create_labels unless @project.gitlab_project_import?
- unless @project.import?
- raise 'Failed to create repository' unless @project.create_repository
+ break if @project.import?
+
+ unless @project.create_repository(default_branch: default_branch)
+ raise 'Failed to create repository'
end
end
end
@@ -267,6 +283,10 @@ module Projects
private
+ def default_branch
+ @default_branch.presence || @project.default_branch_or_main
+ end
+
def validate_import_source_enabled!
return unless @params[:import_type]
@@ -274,6 +294,9 @@ module Projects
return if INTERNAL_IMPORT_SOURCES.include?(import_type)
+      # Skip validation when creating a project from a built-in template
+ return if @params[:import_export_upload].present? && import_type == 'gitlab_project'
+
unless ::Gitlab::CurrentSettings.import_sources&.include?(import_type)
raise ImportSourceDisabledError, "#{import_type} import source is disabled"
end
@@ -289,7 +312,7 @@ module Projects
def import_schedule
if @project.errors.empty?
- @project.import_state.schedule if @project.import? && !@project.bare_repository_import? && !@project.gitlab_project_migration?
+ @project.import_state.schedule if @project.import? && !@project.gitlab_project_migration?
else
fail(error: @project.errors.full_messages.join(', '))
end
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index 5fce816064b..aace8846afc 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -92,8 +92,10 @@ module Projects
def build_fork_network_member(fork_to_project)
if allowed_fork?
- fork_to_project.build_fork_network_member(forked_from_project: @project,
- fork_network: fork_network)
+ fork_to_project.build_fork_network_member(
+ forked_from_project: @project,
+ fork_network: fork_network
+ )
else
fork_to_project.errors.add(:forked_from_project_id, 'is forbidden')
end
diff --git a/app/services/projects/forks/sync_service.rb b/app/services/projects/forks/sync_service.rb
new file mode 100644
index 00000000000..4c70d7f17f5
--- /dev/null
+++ b/app/services/projects/forks/sync_service.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+module Projects
+ module Forks
+    # A service for fetching the upstream default branch and merging it into the fork's specified branch.
+ class SyncService < BaseService
+ ONGOING_MERGE_ERROR = 'The synchronization did not happen due to another merge in progress'
+
+ MergeError = Class.new(StandardError)
+
+ def initialize(project, user, target_branch)
+ super(project, user)
+
+ @source_project = project.fork_source
+ @head_sha = project.repository.commit(target_branch).sha
+ @target_branch = target_branch
+ @details = Projects::Forks::Details.new(project, target_branch)
+ end
+
+ def execute
+ execute_service
+
+ ServiceResponse.success
+ rescue MergeError => e
+ Gitlab::ErrorTracking.log_exception(e, { project_id: project.id, user_id: current_user.id })
+
+ ServiceResponse.error(message: e.message)
+ ensure
+ details.exclusive_lease.cancel
+ end
+
+ private
+
+ attr_reader :source_project, :head_sha, :target_branch, :details
+
+ # The method executes multiple steps:
+ #
+      # 1. Gitlab::Git::CrossRepo fetches the upstream default branch into a temporary ref and returns the new source sha.
+ # 2. New divergence counts are calculated using the source sha.
+ # 3. If the fork is not behind, there is nothing to merge -> exit.
+ # 4. Otherwise, continue with the new source sha.
+      # 5. If Gitlab::Git::CommandError is raised, it means the merge couldn't happen due to a merge conflict. The
+ # details are updated to transfer this error to the user.
+ def execute_service
+ counts = []
+ source_sha = source_project.commit.sha
+
+ Gitlab::Git::CrossRepo.new(repository, source_project.repository)
+ .execute(source_sha) do |cross_repo_source_sha|
+ counts = repository.diverging_commit_count(head_sha, cross_repo_source_sha)
+ ahead, behind = counts
+ next if behind == 0
+
+ execute_with_fetched_source(cross_repo_source_sha, ahead)
+ end
+ rescue Gitlab::Git::CommandError => e
+ details.update!({ sha: head_sha, source_sha: source_sha, counts: counts, has_conflicts: true })
+
+ raise MergeError, e.message
+ end
+
+ def execute_with_fetched_source(cross_repo_source_sha, ahead)
+ with_linked_lfs_pointers(cross_repo_source_sha) do
+ merge_commit_id = perform_merge(cross_repo_source_sha, ahead)
+ raise MergeError, ONGOING_MERGE_ERROR unless merge_commit_id
+ end
+ end
+
+ # This method merges the upstream default branch to the fork specified branch.
+ # Depending on whether the fork branch is ahead of upstream or not, a different type of
+ # merge is performed.
+ #
+      # If the fork's branch is not ahead of the upstream (only behind), a fast-forward merge is performed.
+ # However, if the fork's branch contains commits that don't exist upstream, a merge commit is created.
+ # In this case, a conflict may happen, which interrupts the merge and returns a message to the user.
+ def perform_merge(cross_repo_source_sha, ahead)
+ if ahead > 0
+ message = "Merge branch #{source_project.path}:#{source_project.default_branch} into #{target_branch}"
+
+ repository.merge_to_branch(current_user,
+ source_sha: cross_repo_source_sha,
+ target_branch: target_branch,
+ target_sha: head_sha,
+ message: message)
+ else
+ repository.ff_merge(current_user, cross_repo_source_sha, target_branch, target_sha: head_sha)
+ end
+ end
+
+ # This method links the newly merged lfs objects (if any) with the existing ones upstream.
+      # The LfsLinkService has a limit and may raise an error if there are too many lfs objects to link.
+ # This is the reason why the block is passed:
+ #
+ # 1. Verify that there are not too many lfs objects to link
+ # 2. Execute the block (which basically performs the merge)
+ # 3. Link lfs objects
+ def with_linked_lfs_pointers(newrev, &block)
+ return yield unless project.lfs_enabled?
+
+ oldrev = head_sha
+ new_lfs_oids =
+ Gitlab::Git::LfsChanges
+ .new(repository, newrev)
+ .new_pointers(not_in: [oldrev])
+ .map(&:lfs_oid)
+
+ Projects::LfsPointers::LfsLinkService.new(project).execute(new_lfs_oids, &block)
+ rescue Projects::LfsPointers::LfsLinkService::TooManyOidsError => e
+ raise MergeError, e.message
+ end
+ end
+ end
+end
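A sketch of the fork synchronization entry point; fork_project is a project with a fork source and 'main' is the branch being updated:

    response = Projects::Forks::SyncService.new(fork_project, current_user, 'main').execute

    # Success means the branch was fast-forwarded or merged with the upstream
    # default branch; on a conflict or an ongoing merge the error message is
    # returned and the divergence details are persisted on the fork details.
    response.success? || response.message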
diff --git a/app/services/projects/hashed_storage/base_repository_service.rb b/app/services/projects/hashed_storage/base_repository_service.rb
index 349d4d367be..6241a3e144f 100644
--- a/app/services/projects/hashed_storage/base_repository_service.rb
+++ b/app/services/projects/hashed_storage/base_repository_service.rb
@@ -9,7 +9,7 @@ module Projects
include Gitlab::ShellAdapter
attr_reader :old_disk_path, :new_disk_path, :old_storage_version,
- :logger, :move_wiki, :move_design
+ :logger, :move_wiki, :move_design
def initialize(project:, old_disk_path:, logger: nil)
@project = project
diff --git a/app/services/projects/import_export/relation_export_service.rb b/app/services/projects/import_export/relation_export_service.rb
index dce40cf18ba..33da5b39c20 100644
--- a/app/services/projects/import_export/relation_export_service.rb
+++ b/app/services/projects/import_export/relation_export_service.rb
@@ -85,6 +85,7 @@ module Projects
logger.error(
message: 'Project relation export failed',
export_error: error_message,
+ relation: relation_export.relation,
project_export_job_id: project_export_job.id,
project_name: project.name,
project_id: project.id
diff --git a/app/services/projects/import_service.rb b/app/services/projects/import_service.rb
index e6ccae0a22b..ceab7098b32 100644
--- a/app/services/projects/import_service.rb
+++ b/app/services/projects/import_service.rb
@@ -36,8 +36,11 @@ module Projects
)
message = Projects::ImportErrorFilter.filter_message(e.message)
- error(s_("ImportProjects|Error importing repository %{project_safe_import_url} into %{project_full_path} - %{message}") %
- { project_safe_import_url: project.safe_import_url, project_full_path: project.full_path, message: message })
+ error(
+ s_(
+ "ImportProjects|Error importing repository %{project_safe_import_url} into %{project_full_path} - %{message}"
+ ) % { project_safe_import_url: project.safe_import_url, project_full_path: project.full_path, message: message }
+ )
end
protected
diff --git a/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb b/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
index f7de7f98768..a87996b70e8 100644
--- a/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_download_link_list_service.rb
@@ -51,9 +51,7 @@ module Projects
end
def download_links_for(oids)
- response = Gitlab::HTTP.post(remote_uri,
- body: request_body(oids),
- headers: headers)
+ response = Gitlab::HTTP.post(remote_uri, body: request_body(oids), headers: headers)
raise DownloadLinksRequestEntityTooLargeError if response.request_entity_too_large?
raise DownloadLinksError, response.message unless response.success?
@@ -78,10 +76,12 @@ module Projects
raise DownloadLinkNotFound unless link
- link_list << LfsDownloadObject.new(oid: entry['oid'],
- size: entry['size'],
- headers: headers,
- link: add_credentials(link))
+ link_list << LfsDownloadObject.new(
+ oid: entry['oid'],
+ size: entry['size'],
+ headers: headers,
+ link: add_credentials(link)
+ )
rescue DownloadLinkNotFound, Addressable::URI::InvalidURIError
log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.")
end
diff --git a/app/services/projects/lfs_pointers/lfs_link_service.rb b/app/services/projects/lfs_pointers/lfs_link_service.rb
index cf3cc5cd8e0..f8f03d481af 100644
--- a/app/services/projects/lfs_pointers/lfs_link_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_link_service.rb
@@ -15,9 +15,9 @@ module Projects
def execute(oids)
return [] unless project&.lfs_enabled?
- if oids.size > MAX_OIDS
- raise TooManyOidsError, 'Too many LFS object ids to link, please push them manually'
- end
+ validate!(oids)
+
+ yield if block_given?
# Search and link existing LFS Object
link_existing_lfs_objects(oids)
@@ -25,6 +25,12 @@ module Projects
private
+ def validate!(oids)
+ return if oids.size <= MAX_OIDS
+
+ raise TooManyOidsError, 'Too many LFS object ids to link, please push them manually'
+ end
+
def link_existing_lfs_objects(oids)
linked_existing_objects = []
iterations = 0
diff --git a/app/services/projects/open_issues_count_service.rb b/app/services/projects/open_issues_count_service.rb
index 925512f31d7..d31f4596fa5 100644
--- a/app/services/projects/open_issues_count_service.rb
+++ b/app/services/projects/open_issues_count_service.rb
@@ -26,7 +26,7 @@ module Projects
def user_is_at_least_reporter?
strong_memoize(:user_is_at_least_reporter) do
- @user && @project.team.member?(@user, Gitlab::Access::REPORTER)
+ @project.member?(@user, Gitlab::Access::REPORTER)
end
end
diff --git a/app/services/projects/open_merge_requests_count_service.rb b/app/services/projects/open_merge_requests_count_service.rb
index 76ec13952ab..c67ebf2f26a 100644
--- a/app/services/projects/open_merge_requests_count_service.rb
+++ b/app/services/projects/open_merge_requests_count_service.rb
@@ -4,12 +4,12 @@ module Projects
# Service class for counting and caching the number of open merge requests of
# a project.
class OpenMergeRequestsCountService < Projects::CountService
- def relation_for_count
- @project.merge_requests.opened
- end
-
def cache_key_name
'open_merge_requests_count'
end
+
+ def self.query(project_ids)
+ MergeRequest.opened.of_projects(project_ids)
+ end
end
end
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index b2166dc84c7..d0bef9da329 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -93,7 +93,7 @@ module Projects
sentry_project_id: settings.dig(:project, :sentry_project_id)
}
}
- params[:error_tracking_setting_attributes][:token] = settings[:token] unless /\A\*+\z/.match?(settings[:token]) # Don't update token if we receive masked value
+ params[:error_tracking_setting_attributes][:token] = settings[:token] unless ::ErrorTracking::SentryClient::Token.masked_token?(settings[:token]) # Don't update token if we receive masked value
params[:error_tracking_setting_attributes][:integrated] = settings[:integrated] unless settings[:integrated].nil?
params
diff --git a/app/services/projects/overwrite_project_service.rb b/app/services/projects/overwrite_project_service.rb
index d3fed43363c..aff258c418b 100644
--- a/app/services/projects/overwrite_project_service.rb
+++ b/app/services/projects/overwrite_project_service.rb
@@ -45,11 +45,13 @@ module Projects
duration = ::Gitlab::Metrics::System.monotonic_time - start_time
- Gitlab::AppJsonLogger.info(class: self.class.name,
- namespace_id: source_project.namespace_id,
- project_id: source_project.id,
- duration_s: duration.to_f,
- error: exception.class.name)
+ Gitlab::AppJsonLogger.info(
+ class: self.class.name,
+ namespace_id: source_project.namespace_id,
+ project_id: source_project.id,
+ duration_s: duration.to_f,
+ error: exception.class.name
+ )
end
def move_relationships_between(source_project, target_project)
@@ -83,9 +85,11 @@ module Projects
# we won't be able to query the database (only through its cached data),
# for its former relationships. That's why we're adding it to the network
# as a fork of the target project
- ForkNetworkMember.create!(fork_network: fork_network,
- project: source_project,
- forked_from_project: @project)
+ ForkNetworkMember.create!(
+ fork_network: fork_network,
+ project: source_project,
+ forked_from_project: @project
+ )
end
def remove_source_project_from_fork_network(source_project)
diff --git a/app/services/projects/protect_default_branch_service.rb b/app/services/projects/protect_default_branch_service.rb
index 5360902038b..0aca525921c 100644
--- a/app/services/projects/protect_default_branch_service.rb
+++ b/app/services/projects/protect_default_branch_service.rb
@@ -45,11 +45,7 @@ module Projects
end
def protected_branch_exists?
- if Feature.enabled?(:group_protected_branches)
- project.all_protected_branches.find_by_name(default_branch).present?
- else
- project.protected_branches.find_by_name(default_branch).present?
- end
+ project.all_protected_branches.find_by_name(default_branch).present?
end
def default_branch
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index ed99c69be07..4a9d96d266c 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -63,8 +63,8 @@ module Projects
raise TransferError, s_('TransferProject|Project cannot be transferred, because tags are present in its container registry')
end
- if project.has_packages?(:npm) && !new_namespace_has_same_root?(project)
- raise TransferError, s_("TransferProject|Root namespace can't be updated if project has NPM packages")
+ if !new_namespace_has_same_root?(project) && project.has_namespaced_npm_packages?
+ raise TransferError, s_("TransferProject|Root namespace can't be updated if the project has NPM packages scoped to the current root level namespace.")
end
proceed_to_transfer
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 0fadd75669e..403f645392c 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -90,7 +90,8 @@ module Projects
file: file,
file_count: deployment_update.entries_count,
file_sha256: sha256,
- ci_build_id: build.id
+ ci_build_id: build.id,
+ root_directory: build.options[:publish]
)
break if deployment.size != file.size || deployment.file.size != file.size
diff --git a/app/services/projects/update_remote_mirror_service.rb b/app/services/projects/update_remote_mirror_service.rb
index aca6fa91eb1..b048ec128d8 100644
--- a/app/services/projects/update_remote_mirror_service.rb
+++ b/app/services/projects/update_remote_mirror_service.rb
@@ -75,12 +75,14 @@ module Projects
end
if message.present?
- Gitlab::AppJsonLogger.info(message: "Error synching remote mirror",
- project_id: project.id,
- project_path: project.full_path,
- remote_mirror_id: remote_mirror.id,
- lfs_sync_failed: lfs_sync_failed,
- divergent_ref_list: response.divergent_refs)
+ Gitlab::AppJsonLogger.info(
+ message: "Error synching remote mirror",
+ project_id: project.id,
+ project_path: project.full_path,
+ remote_mirror_id: remote_mirror.id,
+ lfs_sync_failed: lfs_sync_failed,
+ divergent_ref_list: response.divergent_refs
+ )
end
[failed, message]
diff --git a/app/services/projects/update_repository_storage_service.rb b/app/services/projects/update_repository_storage_service.rb
index 7c63216af5e..cadf3012131 100644
--- a/app/services/projects/update_repository_storage_service.rb
+++ b/app/services/projects/update_repository_storage_service.rb
@@ -25,19 +25,6 @@ module Projects
end
end
- # The underlying FetchInternalRemote call uses a `git fetch` to move data
- # to the new repository, which leaves it in a less-well-packed state,
- # lacking bitmaps and commit graphs. Housekeeping will boost performance
- # significantly.
- def enqueue_housekeeping
- return unless Gitlab::CurrentSettings.housekeeping_enabled?
- return unless Feature.enabled?(:repack_after_shard_migration, project)
-
- Repositories::HousekeepingService.new(project, :gc).execute
- rescue Repositories::HousekeepingService::LeaseTaken
- # No action required
- end
-
def remove_old_paths
super
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index 301d11d841c..7f25ab5883f 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -10,6 +10,8 @@ module Projects
def execute
build_topics
remove_unallowed_params
+ add_pages_unique_domain
+
validate!
ensure_wiki_exists if enabling_wiki?
@@ -48,6 +50,24 @@ module Projects
private
+ def add_pages_unique_domain
+ if Feature.disabled?(:pages_unique_domain, project)
+ params[:project_setting_attributes]&.delete(:pages_unique_domain_enabled)
+
+ return
+ end
+
+ return unless params.dig(:project_setting_attributes, :pages_unique_domain_enabled)
+
+      # If the project used a unique domain once, it'll always use the same one
+ return if project.project_setting.pages_unique_domain_in_database.present?
+
+ params[:project_setting_attributes][:pages_unique_domain] = Gitlab::Pages::RandomDomain.generate(
+ project_path: project.path,
+ namespace_path: project.parent.full_path
+ )
+ end
+
def validate!
unless valid_visibility_level_change?(project, project.visibility_attribute_value(params))
raise ValidationError, s_('UpdateProject|New visibility level not allowed!')
@@ -100,6 +120,8 @@ module Projects
def remove_unallowed_params
params.delete(:emails_disabled) unless can?(current_user, :set_emails_disabled, project)
+
+ params.delete(:runner_registration_enabled) if Gitlab::CurrentSettings.valid_runner_registrars.exclude?('project')
end
def after_update
diff --git a/app/services/protected_branches/base_service.rb b/app/services/protected_branches/base_service.rb
index 951017b2d01..0ab46bf236c 100644
--- a/app/services/protected_branches/base_service.rb
+++ b/app/services/protected_branches/base_service.rb
@@ -18,6 +18,10 @@ module ProtectedBranches
def refresh_cache
CacheService.new(@project_or_group, @current_user, @params).refresh
+ rescue StandardError => e
+ Gitlab::ErrorTracking.track_exception(e)
end
end
end
+
+ProtectedBranches::BaseService.prepend_mod
diff --git a/app/services/protected_branches/cache_service.rb b/app/services/protected_branches/cache_service.rb
index 4a9fc335421..cb2977796d7 100644
--- a/app/services/protected_branches/cache_service.rb
+++ b/app/services/protected_branches/cache_service.rb
@@ -73,20 +73,25 @@ module ProtectedBranches
end
def redis_key
- @redis_key ||= if Feature.enabled?(:group_protected_branches)
+ group = project_or_group.is_a?(Group) ? project_or_group : project_or_group.group
+ @redis_key ||= if allow_protected_branches_for_group?(group)
[CACHE_ROOT_KEY, project_or_group.class.name, project_or_group.id].join(':')
else
[CACHE_ROOT_KEY, project_or_group.id].join(':')
end
end
+ def allow_protected_branches_for_group?(group)
+ Feature.enabled?(:group_protected_branches, group) ||
+ Feature.enabled?(:allow_protected_branches_for_group, group)
+ end
+
def metrics
@metrics ||= Gitlab::Cache::Metrics.new(cache_metadata)
end
def cache_metadata
Gitlab::Cache::Metadata.new(
- caller_id: Gitlab::ApplicationContext.current_context_attribute(:caller_id),
cache_identifier: "#{self.class}#fetch",
feature_category: :source_code_management,
backing_resource: :cpu
diff --git a/app/services/quick_actions/interpret_service.rb b/app/services/quick_actions/interpret_service.rb
index f1e4dac8835..b5f6bff756b 100644
--- a/app/services/quick_actions/interpret_service.rb
+++ b/app/services/quick_actions/interpret_service.rb
@@ -11,6 +11,7 @@ module QuickActions
include Gitlab::QuickActions::CommitActions
include Gitlab::QuickActions::CommonActions
include Gitlab::QuickActions::RelateActions
+ include Gitlab::QuickActions::WorkItemActions
attr_reader :quick_action_target
@@ -49,12 +50,13 @@ module QuickActions
# Takes a text and interprets the commands that are extracted from it.
# Returns the content without commands, and array of changes explained.
- def explain(content, quick_action_target)
+ # `keep_actions: true` will keep the quick actions in the content.
+ def explain(content, quick_action_target, keep_actions: false)
return [content, []] unless current_user.can?(:use_quick_actions)
@quick_action_target = quick_action_target
- content, commands = extractor.extract_commands(content)
+ content, commands = extractor(keep_actions).extract_commands(content)
commands = explain_commands(commands)
[content, commands]
end
@@ -65,8 +67,8 @@ module QuickActions
raise Gitlab::QuickActions::CommandDefinition::ParseError, message
end
- def extractor
- Gitlab::QuickActions::Extractor.new(self.class.command_definitions)
+ def extractor(keep_actions = false)
+ Gitlab::QuickActions::Extractor.new(self.class.command_definitions, keep_actions: keep_actions)
end
# Find users for commands like /assign
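A sketch of the extended explain signature used by the markdown preview change above; description and issue are placeholders:

    service = QuickActions::InterpretService.new(project, current_user)

    # keep_actions: true leaves the quick-action lines in the returned text so
    # the preview can render them alongside the explanations.
    text, commands = service.explain(description, issue, keep_actions: true)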
diff --git a/app/services/releases/create_service.rb b/app/services/releases/create_service.rb
index a3289f9e552..e5883ca06f4 100644
--- a/app/services/releases/create_service.rb
+++ b/app/services/releases/create_service.rb
@@ -18,6 +18,12 @@ module Releases
return tag unless tag.is_a?(Gitlab::Git::Tag)
+ if project.catalog_resource
+ response = Ci::Catalog::ValidateResourceService.new(project, ref).execute
+
+ return error(response.message) if response.error?
+ end
+
create_release(tag, evidence_pipeline)
end
diff --git a/app/services/releases/links/base_service.rb b/app/services/releases/links/base_service.rb
new file mode 100644
index 00000000000..8bab258f80a
--- /dev/null
+++ b/app/services/releases/links/base_service.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Releases
+ module Links
+ REASON_BAD_REQUEST = :bad_request
+ REASON_NOT_FOUND = :not_found
+ REASON_FORBIDDEN = :forbidden
+
+ class BaseService
+ attr_accessor :release, :current_user, :params
+
+ def initialize(release, current_user = nil, params = {})
+ @release = release
+ @current_user = current_user
+ @params = params.dup
+ end
+
+ private
+
+ def allowed_params
+ @allowed_params ||= params.slice(:name, :url, :link_type).tap do |hash|
+ hash[:filepath] = filepath if provided_filepath?
+ end
+ end
+
+ def provided_filepath?
+ params.key?(:direct_asset_path) || params.key?(:filepath)
+ end
+
+ def filepath
+ params[:direct_asset_path] || params[:filepath]
+ end
+ end
+ end
+end
diff --git a/app/services/releases/links/create_service.rb b/app/services/releases/links/create_service.rb
new file mode 100644
index 00000000000..94823c54596
--- /dev/null
+++ b/app/services/releases/links/create_service.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Releases
+ module Links
+ class CreateService < BaseService
+ def execute
+ return ServiceResponse.error(reason: REASON_FORBIDDEN, message: _('Access Denied')) unless allowed?
+
+ link = release.links.create(allowed_params)
+
+ if link.persisted?
+ ServiceResponse.success(payload: { link: link })
+ else
+ ServiceResponse.error(reason: REASON_BAD_REQUEST, message: link.errors.full_messages)
+ end
+ end
+
+ private
+
+ def allowed?
+ Ability.allowed?(current_user, :create_release, release)
+ end
+ end
+ end
+end
diff --git a/app/services/releases/links/destroy_service.rb b/app/services/releases/links/destroy_service.rb
new file mode 100644
index 00000000000..1c1158017bb
--- /dev/null
+++ b/app/services/releases/links/destroy_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Releases
+ module Links
+ class DestroyService < BaseService
+ def execute(link)
+ return ServiceResponse.error(reason: REASON_FORBIDDEN, message: _('Access Denied')) unless allowed?
+ return ServiceResponse.error(reason: REASON_NOT_FOUND, message: _('Link does not exist')) unless link
+
+ if link.destroy
+ ServiceResponse.success(payload: { link: link })
+ else
+ ServiceResponse.error(reason: REASON_BAD_REQUEST, message: link.errors.full_messages)
+ end
+ end
+
+ private
+
+ def allowed?
+ Ability.allowed?(current_user, :destroy_release, release)
+ end
+ end
+ end
+end
diff --git a/app/services/releases/links/update_service.rb b/app/services/releases/links/update_service.rb
new file mode 100644
index 00000000000..c29de86f31b
--- /dev/null
+++ b/app/services/releases/links/update_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Releases
+ module Links
+ class UpdateService < BaseService
+ def execute(link)
+ return ServiceResponse.error(reason: REASON_FORBIDDEN, message: _('Access Denied')) unless allowed?
+ return ServiceResponse.error(reason: REASON_NOT_FOUND, message: _('Link does not exist')) unless link
+
+ if link.update(allowed_params)
+ ServiceResponse.success(payload: { link: link })
+ else
+ ServiceResponse.error(reason: REASON_BAD_REQUEST, message: link.errors.full_messages)
+ end
+ end
+
+ private
+
+ def allowed?
+ Ability.allowed?(current_user, :update_release, release)
+ end
+ end
+ end
+end
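A sketch of the shared ServiceResponse contract introduced for release link management; the release, user, and link attributes are placeholders:

    create = Releases::Links::CreateService.new(
      release, current_user,
      { name: 'linux-amd64', url: 'https://example.com/bin/linux-amd64', direct_asset_path: '/binaries/linux-amd64' }
    ).execute

    if create.success?
      link = create.payload[:link]
      Releases::Links::UpdateService.new(release, current_user, { name: 'linux-amd64-v2' }).execute(link)
      Releases::Links::DestroyService.new(release, current_user).execute(link)
    else
      create.reason  # => :forbidden, :not_found or :bad_request
      create.message
    end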
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index f6fe23b4555..553315f08f9 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -2,6 +2,8 @@
module ResourceAccessTokens
class CreateService < BaseService
+ include Gitlab::Utils::StrongMemoize
+
def initialize(current_user, resource, params = {})
@resource_type = resource.class.name.downcase
@resource = resource
@@ -25,7 +27,7 @@ module ResourceAccessTokens
unless member.persisted?
delete_failed_user(user)
- return error("Could not provision #{Gitlab::Access.human_access(access_level).downcase} access to project access token")
+ return error("Could not provision #{Gitlab::Access.human_access(access_level.to_i).downcase} access to the access token. ERROR: #{member.errors.full_messages.to_sentence}")
end
token_response = create_personal_access_token(user)
@@ -43,6 +45,14 @@ module ResourceAccessTokens
attr_reader :resource_type, :resource
+ def username_and_email_generator
+ Gitlab::Utils::UsernameAndEmailGenerator.new(
+ username_prefix: "#{resource_type}_#{resource.id}_bot",
+ email_domain: "noreply.#{Gitlab.config.gitlab.host}"
+ )
+ end
+ strong_memoize_attr :username_and_email_generator
+
def has_permission_to_create?
%w(project group).include?(resource_type) && can?(current_user, :create_resource_access_tokens, resource)
end
@@ -63,31 +73,13 @@ module ResourceAccessTokens
def default_user_params
{
name: params[:name] || "#{resource.name.to_s.humanize} bot",
- email: generate_email,
- username: generate_username,
+ email: username_and_email_generator.email,
+ username: username_and_email_generator.username,
user_type: :project_bot,
skip_confirmation: true # Bot users should always have their emails confirmed.
}
end
- def generate_username
- base_username = "#{resource_type}_#{resource.id}_bot"
-
- uniquify.string(base_username) { |s| User.find_by_username(s) }
- end
-
- def generate_email
- email_pattern = "#{resource_type}#{resource.id}_bot%s@noreply.#{Gitlab.config.gitlab.host}"
-
- uniquify.string(-> (n) { Kernel.sprintf(email_pattern, n) }) do |s|
- User.find_by_email(s)
- end
- end
-
- def uniquify
- Uniquify.new
- end
-
def create_personal_access_token(user)
PersonalAccessTokens::CreateService.new(
current_user: user, target_user: user, params: personal_access_token_params
@@ -108,7 +100,15 @@ module ResourceAccessTokens
end
def create_membership(resource, user, access_level)
- resource.add_member(user, access_level, expires_at: params[:expires_at])
+ resource.add_member(user, access_level, expires_at: default_pat_expiration)
+ end
+
+ def default_pat_expiration
+ if Feature.enabled?(:default_pat_expiration)
+ params[:expires_at].presence || PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now
+ else
+ params[:expires_at]
+ end
end
def log_event(token)
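A sketch of the generator that replaces the hand-rolled Uniquify logic for bot users; only the username and email readers are taken from this change, and the exact suffix format is an assumption:

    generator = Gitlab::Utils::UsernameAndEmailGenerator.new(
      username_prefix: 'project_42_bot',
      email_domain: "noreply.#{Gitlab.config.gitlab.host}"
    )

    generator.username # => a unique username starting with 'project_42_bot'
    generator.email    # => the matching unique address under the noreply domain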
diff --git a/app/services/resource_events/change_labels_service.rb b/app/services/resource_events/change_labels_service.rb
index 02182bc3a77..69e68922b91 100644
--- a/app/services/resource_events/change_labels_service.rb
+++ b/app/services/resource_events/change_labels_service.rb
@@ -55,7 +55,7 @@ module ResourceEvents
end
def create_timeline_events_from(added_labels: [], removed_labels: [])
- return unless resource.incident?
+ return unless resource.incident_type_issue?
IncidentManagement::TimelineEvents::CreateService.change_labels(
resource,
diff --git a/app/services/security/ci_configuration/base_create_service.rb b/app/services/security/ci_configuration/base_create_service.rb
index 3e8865d3dff..b60a949fd4e 100644
--- a/app/services/security/ci_configuration/base_create_service.rb
+++ b/app/services/security/ci_configuration/base_create_service.rb
@@ -19,7 +19,8 @@ module Security
target: '_blank',
rel: 'noopener noreferrer'
raise Gitlab::Graphql::Errors::MutationError,
- _(format('You must %s before using Security features.', docs_link.html_safe)).html_safe
+ Gitlab::Utils::ErrorMessage.to_user_facing(
+ _(format('You must %s before using Security features.', docs_link.html_safe)).html_safe)
end
project.repository.add_branch(current_user, branch_name, project.default_branch)
@@ -51,14 +52,15 @@ module Security
end
def existing_gitlab_ci_content
- root_ref = root_ref_sha(project)
+ root_ref = root_ref_sha(project.repository)
return if root_ref.nil?
@gitlab_ci_yml ||= project.ci_config_for(root_ref)
YAML.safe_load(@gitlab_ci_yml) if @gitlab_ci_yml
rescue Psych::BadAlias
raise Gitlab::Graphql::Errors::MutationError,
- ".gitlab-ci.yml with aliases/anchors is not supported. Please change the CI configuration manually."
+ Gitlab::Utils::ErrorMessage.to_user_facing(
+ _(".gitlab-ci.yml with aliases/anchors is not supported. Please change the CI configuration manually."))
rescue Psych::Exception => e
Gitlab::AppLogger.error("Failed to process existing .gitlab-ci.yml: #{e.message}")
raise Gitlab::Graphql::Errors::MutationError,
@@ -82,13 +84,10 @@ module Security
)
end
- def root_ref_sha(project)
- project.repository.root_ref_sha
- rescue StandardError => e
- # this might fail on the very first commit,
- # and unfortunately it raises a StandardError
- Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
- nil
+ def root_ref_sha(repository)
+ commit = repository.commit(repository.root_ref)
+
+ commit&.sha
end
end
end
diff --git a/app/services/serverless/associate_domain_service.rb b/app/services/serverless/associate_domain_service.rb
deleted file mode 100644
index 0c6ee58924c..00000000000
--- a/app/services/serverless/associate_domain_service.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-module Serverless
- class AssociateDomainService
- PLACEHOLDER_HOSTNAME = 'example.com'
-
- def initialize(knative, pages_domain_id:, creator:)
- @knative = knative
- @pages_domain_id = pages_domain_id
- @creator = creator
- end
-
- def execute
- return if unchanged?
-
- knative.hostname ||= PLACEHOLDER_HOSTNAME
-
- knative.pages_domain = knative.find_available_domain(pages_domain_id)
- knative.serverless_domain_cluster.update(creator: creator) if knative.pages_domain
- end
-
- private
-
- attr_reader :knative, :pages_domain_id, :creator
-
- def unchanged?
- knative.pages_domain&.id == pages_domain_id
- end
- end
-end
diff --git a/app/services/spam/spam_action_service.rb b/app/services/spam/spam_action_service.rb
index 9c52e9f0cd3..7c96f003e46 100644
--- a/app/services/spam/spam_action_service.rb
+++ b/app/services/spam/spam_action_service.rb
@@ -53,7 +53,7 @@ module Spam
end
def allowlisted?(user)
- user.try(:gitlab_employee?) || user.try(:gitlab_bot?) || user.try(:gitlab_service_user?)
+ user.try(:gitlab_bot?) || user.try(:gitlab_service_user?)
end
##
diff --git a/app/services/spam/spam_verdict_service.rb b/app/services/spam/spam_verdict_service.rb
index 4ec07bb4c5f..1279adf327b 100644
--- a/app/services/spam/spam_verdict_service.rb
+++ b/app/services/spam/spam_verdict_service.rb
@@ -14,57 +14,47 @@ module Spam
end
def execute
- spamcheck_result = nil
- spamcheck_attribs = {}
- spamcheck_error = false
+ spamcheck_verdict = nil
external_spam_check_round_trip_time = Benchmark.realtime do
- spamcheck_result, spamcheck_attribs, spamcheck_error = spamcheck_verdict
+ spamcheck_verdict = get_spamcheck_verdict
end
- label = spamcheck_error ? 'ERROR' : spamcheck_result.to_s.upcase
+ histogram.observe({ result: spamcheck_verdict.upcase }, external_spam_check_round_trip_time) if spamcheck_verdict
- histogram.observe({ result: label }, external_spam_check_round_trip_time)
-
- # assign result to a var for logging it before reassigning to nil when monitorMode is true
- original_spamcheck_result = spamcheck_result
-
- spamcheck_result = nil if spamcheck_attribs&.fetch("monitorMode", "false") == "true"
-
- akismet_result = akismet_verdict
+ akismet_verdict = get_akismet_verdict
# filter out anything we don't recognise, including nils.
- valid_results = [spamcheck_result, akismet_result].compact.select { |r| SUPPORTED_VERDICTS.key?(r) }
+ valid_verdicts = [spamcheck_verdict, akismet_verdict].compact.select { |r| SUPPORTED_VERDICTS.key?(r) }
# Treat nils - such as service unavailable - as ALLOW
- return ALLOW unless valid_results.any?
+ return ALLOW unless valid_verdicts.any?
- # Favour the most restrictive result.
- verdict = valid_results.min_by { |v| SUPPORTED_VERDICTS[v][:priority] }
+ # Favour the most restrictive verdict
+ final_verdict = valid_verdicts.min_by { |v| SUPPORTED_VERDICTS[v][:priority] }
# The target can override the verdict via the `allow_possible_spam` application setting
- verdict = OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM if override_via_allow_possible_spam?(verdict: verdict)
+ final_verdict = OVERRIDE_VIA_ALLOW_POSSIBLE_SPAM if override_via_allow_possible_spam?(verdict: final_verdict)
logger.info(class: self.class.name,
akismet_verdict: akismet_verdict,
- spam_check_verdict: original_spamcheck_result,
- extra_attributes: spamcheck_attribs,
+ spam_check_verdict: spamcheck_verdict,
spam_check_rtt: external_spam_check_round_trip_time.real,
- final_verdict: verdict,
+ final_verdict: final_verdict,
username: user.username,
user_id: user.id,
target_type: target.class.to_s,
project_id: target.project_id
)
- verdict
+ final_verdict
end
private
attr_reader :user, :target, :options, :context, :extra_features
- def akismet_verdict
+ def get_akismet_verdict
if akismet.spam?
Gitlab::Recaptcha.enabled? ? CONDITIONAL_ALLOW : DISALLOW
else
@@ -72,23 +62,21 @@ module Spam
end
end
- def spamcheck_verdict
+ def get_spamcheck_verdict
return unless Gitlab::CurrentSettings.spam_check_endpoint_enabled
begin
- result, attribs, _error = spamcheck_client.spam?(spammable: target, user: user, context: context,
- extra_features: extra_features)
- # @TODO log if error is not nil https://gitlab.com/gitlab-org/gitlab/-/issues/329545
+ result = spamcheck_client.spam?(spammable: target, user: user, context: context, extra_features: extra_features)
- return [nil, attribs] unless result
+ if result.evaluated? && Feature.enabled?(:user_spam_scores)
+ Abuse::TrustScore.create!(user: user, score: result.score, source: :spamcheck)
+ end
- [result, attribs]
+ result.verdict
rescue StandardError => e
Gitlab::ErrorTracking.log_exception(e, error: ERROR_TYPE)
-
- # Default to ALLOW if any errors occur
- [ALLOW, attribs, true]
+ nil
end
end
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 9de73a00eac..5f71b7ac9e9 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -388,8 +388,8 @@ module SystemNoteService
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project).log_resolving_alert(monitoring_tool)
end
- def change_issue_type(issue, author)
- ::SystemNotes::IssuablesService.new(noteable: issue, project: issue.project, author: author).change_issue_type
+ def change_issue_type(issue, author, previous_type)
+ ::SystemNotes::IssuablesService.new(noteable: issue, project: issue.project, author: author).change_issue_type(previous_type)
end
def add_timeline_event(timeline_event)
diff --git a/app/services/system_notes/commit_service.rb b/app/services/system_notes/commit_service.rb
index 592351079aa..e4d89ecb930 100644
--- a/app/services/system_notes/commit_service.rb
+++ b/app/services/system_notes/commit_service.rb
@@ -2,6 +2,8 @@
module SystemNotes
class CommitService < ::SystemNotes::BaseService
+ NEW_COMMIT_DISPLAY_LIMIT = 10
+
# Called when commits are added to a merge request
#
# new_commits - Array of Commits added since last push
@@ -36,25 +38,73 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'tag'))
end
+ private
+
# Build an Array of lines detailing each commit added in a merge request
#
# new_commits - Array of new Commit objects
#
# Returns an Array of Strings
- def new_commit_summary(new_commits)
+ def new_commits_list(new_commits)
new_commits.collect do |commit|
content_tag('li', "#{commit.short_id} - #{commit.title}")
end
end
- private
+    # Builds an Array of lines describing each commit and truncates them based on the limit
+ # to avoid creating a note with a large number of commits.
+ #
+ # commits - Array of Commit objects
+ #
+ # Returns an Array of Strings
+ #
+ # rubocop: disable CodeReuse/ActiveRecord
+ def new_commit_summary(commits, start_rev)
+ if commits.size > NEW_COMMIT_DISPLAY_LIMIT
+ no_of_commits_to_truncate = commits.size - NEW_COMMIT_DISPLAY_LIMIT
+ commits_to_truncate = commits.take(no_of_commits_to_truncate)
+ remaining_commits = commits.drop(no_of_commits_to_truncate)
+
+ [truncated_new_commits(commits_to_truncate, start_rev)] + new_commits_list(remaining_commits)
+ else
+ new_commits_list(commits)
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Builds a summary line that describes given truncated commits.
+ #
+ # commits - Array of Commit objects
+ # start_rev - String SHA of a Commit that will be used as the starting SHA of the range
+ #
+ # Returns a String wrapped in 'li' tag.
+ def truncated_new_commits(commits, start_rev)
+ count = commits.size
+
+ commit_ids = if count == 1
+ commits.first.short_id
+ elsif start_rev && !Gitlab::Git.blank_ref?(start_rev)
+ "#{Commit.truncate_sha(start_rev)}...#{commits.last.short_id}"
+ else
+                     # This two-dots notation does not seem to function as expected, but we should
+                     # fall back to it as start_rev can be empty.
+ #
+ # For more information, please see https://gitlab.com/gitlab-org/gitlab/-/issues/391809
+ "#{commits.first.short_id}..#{commits.last.short_id}"
+ end
+
+ commits_text = "#{count} earlier commit".pluralize(count)
+
+ content_tag('li', "#{commit_ids} - #{commits_text}")
+ end
# Builds a list of existing and new commits according to existing_commits and
# new_commits methods.
# Returns a String wrapped in `ul` and `li` tags.
def commits_list(noteable, new_commits, existing_commits, oldrev)
existing_commit_summary = existing_commit_summary(noteable, existing_commits, oldrev)
- new_commit_summary = new_commit_summary(new_commits).join
+ start_rev = existing_commits.empty? ? oldrev : existing_commits.last.id
+ new_commit_summary = new_commit_summary(new_commits, start_rev).join
content_tag('ul', "#{existing_commit_summary}#{new_commit_summary}".html_safe)
end
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index ad9f0dd0368..61a4316e8ae 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -456,8 +456,10 @@ module SystemNotes
create_resource_state_event(status: 'closed', close_auto_resolve_prometheus_alert: true)
end
- def change_issue_type
- body = "changed issue type to #{noteable.issue_type.humanize(capitalize: false)}"
+ def change_issue_type(previous_type)
+ previous = previous_type.humanize(capitalize: false)
+ new = noteable.issue_type.humanize(capitalize: false)
+ body = "changed type from #{previous} to #{new}"
create_note(NoteSummary.new(noteable, project, author, body, action: 'issue_type'))
end
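The new note body is built from the previous and new types, humanized without capitalization. A small illustration of the resulting wording, assuming ActiveSupport is available for String#humanize:

    require 'active_support/core_ext/string/inflections'

    previous_type = 'incident'
    new_type      = 'issue'

    body = "changed type from #{previous_type.humanize(capitalize: false)} " \
           "to #{new_type.humanize(capitalize: false)}"
    puts body # => "changed type from incident to issue"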
diff --git a/app/services/system_notes/time_tracking_service.rb b/app/services/system_notes/time_tracking_service.rb
index c5bdbc6799e..b7a2afbaf15 100644
--- a/app/services/system_notes/time_tracking_service.rb
+++ b/app/services/system_notes/time_tracking_service.rb
@@ -147,7 +147,7 @@ module SystemNotes
readable_date = date_key.humanize.downcase
if changed_date.nil?
- "removed #{readable_date}"
+ "removed #{readable_date} #{changed_dates[date_key].first.to_s(:long)}"
else
"changed #{readable_date} to #{changed_date.to_s(:long)}"
end
diff --git a/app/services/tasks_to_be_done/base_service.rb b/app/services/tasks_to_be_done/base_service.rb
index 5851a2cb9e5..1c74e803e0b 100644
--- a/app/services/tasks_to_be_done/base_service.rb
+++ b/app/services/tasks_to_be_done/base_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module TasksToBeDone
- class BaseService < ::IssuableBaseService
+ class BaseService < ::BaseContainerService
LABEL_PREFIX = 'tasks to be done'
def initialize(container:, current_user:, assignee_ids: [])
@@ -11,7 +11,7 @@ module TasksToBeDone
description: description,
add_labels: label_name
}
- super(project: container, current_user: current_user, params: params)
+ super(container: container, current_user: current_user, params: params)
end
def execute
@@ -19,8 +19,8 @@ module TasksToBeDone
update_service = Issues::UpdateService.new(container: project, current_user: current_user, params: { add_assignee_ids: params[:assignee_ids] })
update_service.execute(issue)
else
- build_service = Issues::BuildService.new(container: project, current_user: current_user, params: params)
- create(build_service.execute)
+ create_service = Issues::CreateService.new(container: project, current_user: current_user, params: params, spam_params: nil)
+ create_service.execute
end
end
diff --git a/app/services/terraform/remote_state_handler.rb b/app/services/terraform/remote_state_handler.rb
index 849afaddec6..f72bf0390e4 100644
--- a/app/services/terraform/remote_state_handler.rb
+++ b/app/services/terraform/remote_state_handler.rb
@@ -2,6 +2,8 @@
module Terraform
class RemoteStateHandler < BaseService
+ include Gitlab::OptimisticLocking
+
StateLockedError = Class.new(StandardError)
StateDeletedError = Class.new(StandardError)
UnauthorizedError = Class.new(StandardError)
@@ -59,7 +61,9 @@ module Terraform
private
def retrieve_with_lock(find_only: false)
- create_or_find!(find_only: find_only).tap { |state| state.with_lock { yield state } }
+ create_or_find!(find_only: find_only).tap do |state|
+ retry_lock(state, name: "Terraform state: #{state.id}") { yield state }
+ end
end
def create_or_find!(find_only:)
@@ -70,7 +74,7 @@ module Terraform
state = if find_only
find_state!(find_params)
else
- Terraform::State.create_or_find_by(find_params)
+ Terraform::State.safe_find_or_create_by(find_params)
end
raise StateDeletedError if state.deleted_at?
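The handler now wraps state mutations in retry_lock from Gitlab::OptimisticLocking rather than a plain row lock, and uses safe_find_or_create_by to avoid duplicate-insert races. As a rough, framework-free sketch of the general retry-on-conflict idea (the exception class, attempt count, and method names below are illustrative, not GitLab's actual implementation):

    # Generic sketch of optimistic-locking retries; not the Gitlab::OptimisticLocking code.
    class StaleRecordError < StandardError; end

    def retry_on_conflict(max_attempts: 3)
      attempts = 0
      begin
        attempts += 1
        yield
      rescue StaleRecordError
        raise if attempts >= max_attempts

        # A real implementation would reload the record here before trying again.
        retry
      end
    end

    retry_on_conflict { puts 'state updated' }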
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index 42a8aca17d3..c55e1680bfe 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -175,13 +175,26 @@ class TodoService
TodosFinder.new(current_user).any_for_target?(issuable, :pending)
end
- # Resolves all todos related to target
+ # Resolves all todos related to target for the current_user
def resolve_todos_for_target(target, current_user)
attributes = attributes_for_target(target)
resolve_todos(pending_todos([current_user], attributes), current_user)
end
+ # Resolves all todos related to target for all users
+ def resolve_todos_with_attributes_for_target(target, attributes, resolution: :done, resolved_by_action: :system_done)
+ target_attributes = { target_id: target.id, target_type: target.class.polymorphic_name }
+ attributes.merge!(target_attributes)
+ attributes[:preload_user_association] = true
+
+ todos = PendingTodosFinder.new(attributes).execute
+ users = todos.map(&:user)
+ todos_ids = todos.batch_update(state: resolution, resolved_by_action: resolved_by_action)
+ users.each(&:update_todos_count_cache)
+ todos_ids
+ end
+
def resolve_todos(todos, current_user, resolution: :done, resolved_by_action: :system_done)
todos_ids = todos.batch_update(state: resolution, resolved_by_action: resolved_by_action)
@@ -198,21 +211,20 @@ class TodoService
current_user.update_todos_count_cache
end
- def resolve_access_request_todos(current_user, member)
- return if current_user.nil? || member.nil?
+ def resolve_access_request_todos(member)
+ return if member.nil?
+ # Group or Project
target = member.source
- finder_params = {
+ todos_params = {
state: :pending,
author_id: member.user_id,
- action_id: ::Todo::MEMBER_ACCESS_REQUESTED,
- type: target.class.polymorphic_name,
- target: target.id
+ action: ::Todo::MEMBER_ACCESS_REQUESTED,
+ type: target.class.polymorphic_name
}
- todos = TodosFinder.new(current_user, finder_params).execute
- resolve_todos(todos, current_user)
+ resolve_todos_with_attributes_for_target(target, todos_params)
end
def restore_todos(todos, current_user)
@@ -419,7 +431,7 @@ class TodoService
end
def pending_todos(users, criteria = {})
- PendingTodosFinder.new(users, criteria).execute
+ PendingTodosFinder.new(criteria.merge(users: users)).execute
end
def track_todo_creation(user, issue_type, namespace, project)
@@ -428,8 +440,6 @@ class TodoService
event = "incident_management_incident_todo"
track_usage_event(event, user.id)
- return unless Feature.enabled?(:route_hll_to_snowplow_phase2, namespace)
-
Gitlab::Tracking.event(
self.class.to_s,
event,
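resolve_todos_with_attributes_for_target resolves pending todos for every user that has one on the target and then refreshes each affected user's cached todo count. A simplified, dependency-free sketch of that flow; the User and Todo structs stand in for the real models, and the per-todo loop stands in for the single batch UPDATE:

    # Stand-in objects; the real code uses PendingTodosFinder and Todo.batch_update.
    User = Struct.new(:name, :pending_count) do
      def update_todos_count_cache
        self.pending_count = 0 # the real code recomputes this from the database
      end
    end

    Todo = Struct.new(:user, :state)

    def resolve_all_for_target(todos, resolution: :done)
      users = todos.map(&:user).uniq
      todos.each { |todo| todo.state = resolution } # a single batch UPDATE in the real code
      users.each(&:update_todos_count_cache)
      todos
    end

    alice = User.new('alice', 2)
    todos = [Todo.new(alice, :pending), Todo.new(alice, :pending)]
    resolve_all_for_target(todos)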
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index c8f9c28061f..24aa4aa1061 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -41,8 +41,6 @@ module Users
Gitlab::UsageDataCounters::HLLRedisCounter.track_event('unique_active_user', values: user.id)
- return unless Feature.enabled?(:route_hll_to_snowplow_phase3)
-
Gitlab::Tracking.event(
'Users::ActivityService',
'perform_action',
diff --git a/app/services/users/approve_service.rb b/app/services/users/approve_service.rb
index 353456c545d..53ec37d0ff7 100644
--- a/app/services/users/approve_service.rb
+++ b/app/services/users/approve_service.rb
@@ -17,6 +17,11 @@ module Users
user.accept_pending_invitations! if user.active_for_authentication?
DeviseMailer.user_admin_approval(user).deliver_later
+ if user.created_by_id
+ reset_token = user.generate_reset_token
+ NotificationService.new.new_user(user, reset_token)
+ end
+
log_event(user)
after_approve_hook(user)
success(message: 'Success', http_status: :created)
diff --git a/app/services/users/ban_service.rb b/app/services/users/ban_service.rb
index 959d4be3795..5ed31cdb778 100644
--- a/app/services/users/ban_service.rb
+++ b/app/services/users/ban_service.rb
@@ -17,3 +17,5 @@ module Users
end
end
end
+
+Users::BanService.prepend_mod_with('Users::BanService')
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index 934dccf2f76..04a11f41eb1 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -2,6 +2,8 @@
module Users
class BuildService < BaseService
+ ALLOWED_USER_TYPES = %i[project_bot security_policy_bot].freeze
+
delegate :user_default_internal_regex_enabled?,
:user_default_internal_regex_instance,
to: :'Gitlab::CurrentSettings.current_application_settings'
@@ -70,7 +72,7 @@ module Users
@user_params[:created_by_id] = current_user&.id
@user_params[:external] = user_external? if set_external_param?
- @user_params.delete(:user_type) unless project_bot?
+ @user_params.delete(:user_type) unless allowed_user_type?
end
def set_external_param?
@@ -81,8 +83,8 @@ module Users
user_default_internal_regex_instance.match(params[:email]).nil?
end
- def project_bot?
- user_params[:user_type]&.to_sym == :project_bot
+ def allowed_user_type?
+ ALLOWED_USER_TYPES.include?(user_params[:user_type]&.to_sym)
end
def password_reset
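The project_bot? check becomes a whitelist of permitted user types. The pattern reduces to a symbol lookup against a frozen constant, roughly as sketched here (parameter handling is simplified):

    # Minimal sketch of the whitelist check.
    ALLOWED_USER_TYPES = %i[project_bot security_policy_bot].freeze

    def allowed_user_type?(user_params)
      ALLOWED_USER_TYPES.include?(user_params[:user_type]&.to_sym)
    end

    p allowed_user_type?(user_type: 'project_bot') # => true
    p allowed_user_type?(user_type: 'alert_bot')   # => false
    p allowed_user_type?({})                       # => false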
diff --git a/app/services/users/deactivate_service.rb b/app/services/users/deactivate_service.rb
new file mode 100644
index 00000000000..e69ce13d3cc
--- /dev/null
+++ b/app/services/users/deactivate_service.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Users
+ class DeactivateService < BaseService
+ def initialize(current_user, skip_authorization: false)
+ @current_user = current_user
+ @skip_authorization = skip_authorization
+ end
+
+ def execute(user)
+ unless allowed?
+ return ::ServiceResponse.error(message: _('You are not authorized to perform this action'),
+ reason: :forbidden)
+ end
+
+ if user.blocked?
+ return ::ServiceResponse.error(message: _('Error occurred. A blocked user cannot be deactivated'),
+ reason: :forbidden)
+ end
+
+ if user.internal?
+ return ::ServiceResponse.error(message: _('Internal users cannot be deactivated'),
+ reason: :forbidden)
+ end
+
+ return ::ServiceResponse.success(message: _('User has already been deactivated')) if user.deactivated?
+
+ unless user.can_be_deactivated?
+ message = _(
+ 'The user you are trying to deactivate has been active in the past %{minimum_inactive_days} days ' \
+ 'and cannot be deactivated')
+
+ deactivation_error_message = format(message,
+ minimum_inactive_days: Gitlab::CurrentSettings.deactivate_dormant_users_period)
+ return ::ServiceResponse.error(message: deactivation_error_message, reason: :forbidden)
+ end
+
+ unless user.deactivate
+ return ::ServiceResponse.error(message: user.errors.full_messages.to_sentence,
+ reason: :bad_request)
+ end
+
+ log_event(user)
+
+ ::ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :current_user
+
+ def allowed?
+ return true if @skip_authorization
+
+ can?(current_user, :admin_all_resources)
+ end
+
+ def log_event(user)
+ Gitlab::AppLogger.info(message: 'User deactivated', user: user.username.to_s, email: user.email.to_s,
+ deactivated_by: current_user.username.to_s, ip_address: current_user.current_sign_in_ip.to_s)
+ end
+ end
+end
+
+Users::DeactivateService.prepend_mod_with('Users::DeactivateService')
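The new service returns a result object for every outcome instead of raising, checking authorization and user state through a chain of guard clauses. A framework-free sketch of that guard-clause plus result-object shape; the Result struct below is a stand-in for GitLab's ServiceResponse and the messages are abbreviated:

    # Simplified sketch of the pattern above; not the actual service or ServiceResponse.
    Result = Struct.new(:status, :message, :reason) do
      def success?
        status == :success
      end
    end

    def deactivate(user, admin:)
      return Result.new(:error, 'not authorized', :forbidden) unless admin
      return Result.new(:error, 'a blocked user cannot be deactivated', :forbidden) if user[:blocked]
      return Result.new(:success, 'already deactivated', nil) if user[:deactivated]

      user[:deactivated] = true
      Result.new(:success, nil, nil)
    end

    p deactivate({ blocked: false, deactivated: false }, admin: true).success? # => true
    p deactivate({ blocked: true,  deactivated: false }, admin: true).reason   # => :forbidden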
diff --git a/app/services/users/email_verification/base_service.rb b/app/services/users/email_verification/base_service.rb
index 3337beec195..721290fe056 100644
--- a/app/services/users/email_verification/base_service.rb
+++ b/app/services/users/email_verification/base_service.rb
@@ -5,22 +5,23 @@ module Users
class BaseService
VALID_ATTRS = %i[unlock_token confirmation_token].freeze
- def initialize(attr:)
+ def initialize(attr:, user:)
@attr = attr
+ @user = user
validate_attr!
end
protected
- attr_reader :attr, :token
+ attr_reader :attr, :user, :token
def validate_attr!
raise ArgumentError, 'Invalid attribute' unless attr.in?(VALID_ATTRS)
end
def digest
- Devise.token_generator.digest(User, attr, token)
+ Devise.token_generator.digest(User, user.email, token)
end
end
end
diff --git a/app/services/users/email_verification/validate_token_service.rb b/app/services/users/email_verification/validate_token_service.rb
index b1b34e94f49..30413de805c 100644
--- a/app/services/users/email_verification/validate_token_service.rb
+++ b/app/services/users/email_verification/validate_token_service.rb
@@ -8,9 +8,8 @@ module Users
TOKEN_VALID_FOR_MINUTES = 60
def initialize(attr:, user:, token:)
- super(attr: attr)
+ super(attr: attr, user: user)
- @user = user
@token = token
end
diff --git a/app/services/users/unban_service.rb b/app/services/users/unban_service.rb
index 753a02fa752..2019f7e82e1 100644
--- a/app/services/users/unban_service.rb
+++ b/app/services/users/unban_service.rb
@@ -17,3 +17,5 @@ module Users
end
end
end
+
+Users::UnbanService.prepend_mod_with('Users::UnbanService')
diff --git a/app/services/users/unblock_service.rb b/app/services/users/unblock_service.rb
index 1302395662f..d80f65b5757 100644
--- a/app/services/users/unblock_service.rb
+++ b/app/services/users/unblock_service.rb
@@ -27,3 +27,5 @@ module Users
end
end
end
+
+Users::UnblockService.prepend_mod_with('Users::UnblockService')
diff --git a/app/services/users/update_service.rb b/app/services/users/update_service.rb
index 96018db5974..36c41c03303 100644
--- a/app/services/users/update_service.rb
+++ b/app/services/users/update_service.rb
@@ -6,6 +6,7 @@ module Users
attr_reader :user, :identity_params
ATTRS_REQUIRING_PASSWORD_CHECK = %w[email].freeze
+ BATCH_SIZE = 100
def initialize(current_user, params = {})
@current_user = current_user
@@ -34,7 +35,7 @@ module Users
reset_unconfirmed_email
if @user.save(validate: validate) && update_status
- notify_success(user_exists)
+ after_update(user_exists)
else
messages = @user.errors.full_messages + Array(@user.status&.errors&.full_messages)
error(messages.uniq.join('. '))
@@ -80,8 +81,6 @@ module Users
def notify_success(user_exists)
notify_new_user(@user, nil) unless user_exists
-
- success
end
def discard_read_only_attributes
@@ -118,6 +117,30 @@ module Users
def provider_params
identity_params.slice(*provider_attributes)
end
+
+ def after_update(user_exists)
+ notify_success(user_exists)
+ remove_followers_and_followee! if ::Feature.enabled?(:disable_follow_users, user)
+
+ success
+ end
+
+ def remove_followers_and_followee!
+ return false unless user.user_preference.enabled_following_previously_changed?(from: true, to: false)
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ loop do
+ inner_query = Users::UserFollowUser
+ .where(follower_id: user.id).or(Users::UserFollowUser.where(followee_id: user.id))
+ .select(:follower_id, :followee_id)
+ .limit(BATCH_SIZE)
+
+ deleted_records = Users::UserFollowUser.where('(follower_id, followee_id) IN (?)', inner_query).delete_all
+
+ break if deleted_records == 0
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
end
end
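remove_followers_and_followee! deletes follow relationships in fixed-size batches so a heavily-followed user never triggers one huge DELETE. The general loop shape, as a framework-free sketch (batch size and data are illustrative; the real code issues SQL deletes against the user_follow_users table):

    # Generic batched-deletion loop.
    BATCH_SIZE = 100

    rows = Array.new(250) { |i| { follower_id: i, followee_id: 1 } }

    deleted_total = 0
    loop do
      batch = rows.shift(BATCH_SIZE) # "LIMIT BATCH_SIZE" in the real SQL subquery
      break if batch.empty?          # the real loop stops when delete_all returns 0

      deleted_total += batch.size    # delete_all would run here in the real code
    end

    puts deleted_total # => 250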
diff --git a/app/services/users/upsert_credit_card_validation_service.rb b/app/services/users/upsert_credit_card_validation_service.rb
index 7190c82bea3..61cf598f178 100644
--- a/app/services/users/upsert_credit_card_validation_service.rb
+++ b/app/services/users/upsert_credit_card_validation_service.rb
@@ -2,9 +2,8 @@
module Users
class UpsertCreditCardValidationService < BaseService
- def initialize(params, user)
+ def initialize(params)
@params = params.to_h.with_indifferent_access
- @current_user = user
end
def execute
@@ -19,8 +18,6 @@ module Users
::Users::CreditCardValidation.upsert(@params)
- ::Users::UpdateService.new(current_user, user: current_user, requires_credit_card_verification: false).execute!
-
ServiceResponse.success(message: 'CreditCardValidation was set')
rescue ActiveRecord::InvalidForeignKey, ActiveRecord::NotNullViolation => e
ServiceResponse.error(message: "Could not set CreditCardValidation: #{e.message}")
diff --git a/app/services/users/validate_manual_otp_service.rb b/app/services/users/validate_manual_otp_service.rb
index 96a827db13c..8ba76f5f593 100644
--- a/app/services/users/validate_manual_otp_service.rb
+++ b/app/services/users/validate_manual_otp_service.rb
@@ -3,6 +3,7 @@
module Users
class ValidateManualOtpService < BaseService
include ::Gitlab::Auth::Otp::Fortinet
+ include ::Gitlab::Auth::Otp::DuoAuth
def initialize(current_user)
@current_user = current_user
@@ -10,6 +11,8 @@ module Users
::Gitlab::Auth::Otp::Strategies::FortiAuthenticator::ManualOtp.new(current_user)
elsif forti_token_cloud_enabled?(current_user)
::Gitlab::Auth::Otp::Strategies::FortiTokenCloud.new(current_user)
+ elsif duo_auth_enabled?(current_user)
+ ::Gitlab::Auth::Otp::Strategies::DuoAuth::ManualOtp.new(current_user)
else
::Gitlab::Auth::Otp::Strategies::Devise.new(current_user)
end
diff --git a/app/services/work_items/create_service.rb b/app/services/work_items/create_service.rb
index eff2132039f..ae355dc6d96 100644
--- a/app/services/work_items/create_service.rb
+++ b/app/services/work_items/create_service.rb
@@ -2,6 +2,7 @@
module WorkItems
class CreateService < Issues::CreateService
+ extend ::Gitlab::Utils::Override
include WidgetableService
def initialize(container:, spam_params:, current_user: nil, params: {}, widget_params: {})
@@ -48,6 +49,15 @@ module WorkItems
private
+ override :handle_quick_actions
+ def handle_quick_actions(work_item)
+ # Do not handle quick actions unless the work item is the default Issue.
+ # The available quick actions for a work item depend on its type and widgets.
+ return if work_item.work_item_type != WorkItems::Type.default_by_type(:issue)
+
+ super
+ end
+
def authorization_action
:create_work_item
end
diff --git a/app/services/work_items/export_csv_service.rb b/app/services/work_items/export_csv_service.rb
index 9bef75e2c40..ee20a2832ce 100644
--- a/app/services/work_items/export_csv_service.rb
+++ b/app/services/work_items/export_csv_service.rb
@@ -11,24 +11,38 @@ module WorkItems
end
def email(mail_to_user)
- # TODO - will be implemented as part of https://gitlab.com/gitlab-org/gitlab/-/issues/379082
+ Notify.export_work_items_csv_email(mail_to_user, resource_parent, csv_data, csv_builder.status).deliver_now
end
private
def associations_to_preload
- [:work_item_type, :author]
+ [:project, [work_item_type: :enabled_widget_definitions], :author]
end
def header_to_value_hash
{
'Id' => 'iid',
'Title' => 'title',
+ 'Description' => ->(work_item) { get_widget_value_for(work_item, :description) },
'Type' => ->(work_item) { work_item.work_item_type.name },
'Author' => 'author_name',
'Author Username' => ->(work_item) { work_item.author.username },
'Created At (UTC)' => ->(work_item) { work_item.created_at.to_s(:csv) }
}
end
+
+ def get_widget_value_for(work_item, field)
+ widget_name = field_to_widget_map[field]
+ widget = work_item.get_widget(widget_name)
+
+ widget.try(field)
+ end
+
+ def field_to_widget_map
+ {
+ description: :description
+ }
+ end
end
end
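header_to_value_hash mixes plain attribute names with lambdas, and the new Description column is resolved through a widget lookup. A small standalone sketch of that mixed mapping using CSV from the standard library and a Struct in place of the work item; widget resolution is reduced to a plain attribute here:

    require 'csv'

    # Struct stands in for the work item; the real code resolves widgets per row.
    WorkItem = Struct.new(:iid, :title, :description, keyword_init: true)

    HEADER_TO_VALUE = {
      'Id'          => :iid,
      'Title'       => :title,
      'Description' => ->(item) { item.description.to_s }
    }.freeze

    items = [WorkItem.new(iid: 1, title: 'Fix login', description: 'Steps to reproduce...')]

    csv = CSV.generate do |rows|
      rows << HEADER_TO_VALUE.keys
      items.each do |item|
        rows << HEADER_TO_VALUE.values.map { |v| v.is_a?(Proc) ? v.call(item) : item.public_send(v) }
      end
    end

    puts csv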
diff --git a/app/services/work_items/import_csv_service.rb b/app/services/work_items/import_csv_service.rb
new file mode 100644
index 00000000000..e7043cc882a
--- /dev/null
+++ b/app/services/work_items/import_csv_service.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+module WorkItems
+ class ImportCsvService < ImportCsv::BaseService
+ extend ::Gitlab::Utils::Override
+
+ FeatureNotAvailableError = StandardError.new(
+ 'This feature is currently behind a feature flag and it is not available.'
+ )
+ NotAuthorizedError = StandardError.new('You do not have permission to import work items in this project.')
+
+ override :initialize
+ def initialize(*args)
+ super
+
+ @type_errors = {
+ blank: [],
+ missing: {},
+ disallowed: {}
+ }
+ end
+
+ def self.required_headers
+ %w[title type].freeze
+ end
+
+ def execute
+ raise FeatureNotAvailableError if ::Feature.disabled?(:import_export_work_items_csv, project)
+ raise NotAuthorizedError unless Ability.allowed?(user, :import_work_items, project)
+
+ super
+ end
+
+ def email_results_to_user
+ Notify.import_work_items_csv_email(user.id, project.id, results).deliver_later
+ end
+
+ private
+
+ attr_accessor :type_errors
+
+ def create_object(attributes)
+ super[:work_item]
+ end
+
+ def create_object_class
+ ::WorkItems::CreateService
+ end
+
+ override :attributes_for
+ def attributes_for(row)
+ {
+ title: row[:title],
+ work_item_type: match_work_item_type(row[:type])
+ }
+ end
+
+ override :validate_headers_presence!
+ def validate_headers_presence!(headers)
+ required_headers = self.class.required_headers
+
+ headers.downcase!
+ return if headers && required_headers.all? { |rh| headers.include?(rh) }
+
+ required_headers_message = "Required headers are missing. Required headers are #{required_headers.join(', ')}"
+ raise CSV::MalformedCSVError.new(required_headers_message, 1)
+ end
+
+ def match_work_item_type(work_item_type)
+ match = available_work_item_types[work_item_type&.downcase]
+ match[:type] if match
+ end
+
+ def available_work_item_types
+ {
+ issue: {
+ allowed: Ability.allowed?(user, :create_issue, project),
+ type: WorkItems::Type.default_by_type(:issue)
+ }
+ }.with_indifferent_access
+ end
+ strong_memoize_attr :available_work_item_types
+
+ def preprocess!
+ with_csv_lines.each do |row, line_no|
+ work_item_type = row[:type]&.strip&.downcase
+
+ if work_item_type.blank?
+ type_errors[:blank] << line_no
+ elsif missing?(work_item_type)
+ # does this work item type exist among the work item types we support?
+ (type_errors[:missing][work_item_type] ||= []) << line_no
+ elsif !allowed?(work_item_type)
+ (type_errors[:disallowed][work_item_type] ||= []) << line_no
+ end
+ end
+
+ return if type_errors[:blank].empty? &&
+ type_errors[:missing].blank? &&
+ type_errors[:disallowed].blank?
+
+ results[:type_errors] = type_errors
+ raise PreprocessError
+ end
+
+ def missing?(work_item_type_name)
+ !available_work_item_types.key?(work_item_type_name)
+ end
+
+ def allowed?(work_item_type_name)
+ !!available_work_item_types[work_item_type_name][:allowed]
+ end
+ end
+end
+
+WorkItems::ImportCsvService.prepend_mod
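preprocess! walks the CSV once and buckets bad rows into blank, missing, and disallowed type errors before any work item is created. A compact sketch of that bucketing; the row data and type table below are made up for illustration:

    # Illustrative preprocessing pass; the real service reads rows via with_csv_lines.
    AVAILABLE_TYPES = { 'issue' => { allowed: true } }.freeze

    rows = [
      { line: 1, type: 'Issue' },
      { line: 2, type: '' },
      { line: 3, type: 'Epic' }
    ]

    type_errors = {
      blank: [],
      missing: Hash.new { |h, k| h[k] = [] },
      disallowed: Hash.new { |h, k| h[k] = [] }
    }

    rows.each do |row|
      type = row[:type].to_s.strip.downcase

      if type.empty?
        type_errors[:blank] << row[:line]
      elsif !AVAILABLE_TYPES.key?(type)
        type_errors[:missing][type] << row[:line]
      elsif !AVAILABLE_TYPES[type][:allowed]
        type_errors[:disallowed][type] << row[:line]
      end
    end

    p type_errors # => {:blank=>[2], :missing=>{"epic"=>[3]}, :disallowed=>{}}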
diff --git a/app/services/work_items/parent_links/base_service.rb b/app/services/work_items/parent_links/base_service.rb
new file mode 100644
index 00000000000..6f22e09a3fc
--- /dev/null
+++ b/app/services/work_items/parent_links/base_service.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module ParentLinks
+ class BaseService < IssuableLinks::CreateService
+ extend ::Gitlab::Utils::Override
+
+ private
+
+ def set_parent(issuable, work_item)
+ link = WorkItems::ParentLink.for_work_item(work_item)
+ link.work_item_parent = issuable
+ link
+ end
+
+ def create_notes(work_item)
+ SystemNoteService.relate_work_item(issuable, work_item, current_user)
+ end
+
+ def linkable_issuables(work_items)
+ @linkable_issuables ||= if can_admin_link?(issuable)
+ work_items.select { |work_item| linkable?(work_item) }
+ else
+ []
+ end
+ end
+
+ def linkable?(work_item)
+ can_admin_link?(work_item) && previous_related_issuables.exclude?(work_item)
+ end
+
+ def can_admin_link?(work_item)
+ can?(current_user, :admin_parent_link, work_item)
+ end
+
+ override :previous_related_issuables
+ def previous_related_issuables
+ @previous_related_issuables ||= issuable.work_item_children.to_a
+ end
+
+ override :target_issuable_type
+ def target_issuable_type
+ 'work item'
+ end
+
+ override :issuables_not_found_message
+ def issuables_not_found_message
+ format(_('No matching %{issuable} found. Make sure that you are adding a valid %{issuable} ID.'),
+ issuable: target_issuable_type)
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/parent_links/create_service.rb b/app/services/work_items/parent_links/create_service.rb
index 288ca152f93..4747d2f17e4 100644
--- a/app/services/work_items/parent_links/create_service.rb
+++ b/app/services/work_items/parent_links/create_service.rb
@@ -2,57 +2,34 @@
module WorkItems
module ParentLinks
- class CreateService < IssuableLinks::CreateService
+ class CreateService < WorkItems::ParentLinks::BaseService
private
- # rubocop: disable CodeReuse/ActiveRecord
+ override :relate_issuables
def relate_issuables(work_item)
- link = WorkItems::ParentLink.find_or_initialize_by(work_item: work_item)
- link.work_item_parent = issuable
+ link = set_parent(issuable, work_item)
+
+ link.move_to_end
if link.changed? && link.save
- create_notes(work_item)
+ relate_child_note = create_notes(work_item)
+
+ ResourceLinkEvent.create(
+ user: current_user,
+ work_item: link.work_item_parent,
+ child_work_item: link.work_item,
+ action: ResourceLinkEvent.actions[:add],
+ system_note_metadata_id: relate_child_note&.system_note_metadata&.id
+ )
end
link
end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def linkable_issuables(work_items)
- @linkable_issuables ||= begin
- return [] unless can?(current_user, :admin_parent_link, issuable)
-
- work_items.select do |work_item|
- linkable?(work_item)
- end
- end
- end
-
- def linkable?(work_item)
- can?(current_user, :admin_parent_link, work_item) &&
- !previous_related_issuables.include?(work_item)
- end
-
- def previous_related_issuables
- @related_issues ||= issuable.work_item_children.to_a
- end
+ override :extract_references
def extract_references
params[:issuable_references]
end
-
- def create_notes(work_item)
- SystemNoteService.relate_work_item(issuable, work_item, current_user)
- end
-
- def target_issuable_type
- 'work item'
- end
-
- def issuables_not_found_message
- _('No matching %{issuable} found. Make sure that you are adding a valid %{issuable} ID.' %
- { issuable: target_issuable_type })
- end
end
end
end
diff --git a/app/services/work_items/parent_links/destroy_service.rb b/app/services/work_items/parent_links/destroy_service.rb
index 19770b3e4b5..97145d0b360 100644
--- a/app/services/work_items/parent_links/destroy_service.rb
+++ b/app/services/work_items/parent_links/destroy_service.rb
@@ -15,7 +15,15 @@ module WorkItems
private
def create_notes
- SystemNoteService.unrelate_work_item(parent, child, current_user)
+ unrelate_note = SystemNoteService.unrelate_work_item(parent, child, current_user)
+
+ ResourceLinkEvent.create(
+ user: @current_user,
+ work_item: @link.work_item_parent,
+ child_work_item: @link.work_item,
+ action: ResourceLinkEvent.actions[:remove],
+ system_note_metadata_id: unrelate_note&.system_note_metadata&.id
+ )
end
def not_found_message
diff --git a/app/services/work_items/parent_links/reorder_service.rb b/app/services/work_items/parent_links/reorder_service.rb
new file mode 100644
index 00000000000..0ee650bd8ab
--- /dev/null
+++ b/app/services/work_items/parent_links/reorder_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module ParentLinks
+ class ReorderService < WorkItems::ParentLinks::BaseService
+ private
+
+ override :relate_issuables
+ def relate_issuables(work_item)
+ notes_are_expected = work_item.work_item_parent != issuable
+ link = set_parent(issuable, work_item)
+ reorder(link, params[:adjacent_work_item], params[:relative_position])
+
+ create_notes(work_item) if link.save && notes_are_expected
+
+ link
+ end
+
+ def reorder(link, adjacent_work_item, relative_position)
+ WorkItems::ParentLink.move_nulls_to_end(RelativePositioning.mover.context(link).relative_siblings)
+
+ link.move_before(adjacent_work_item.parent_link) if relative_position == 'BEFORE'
+ link.move_after(adjacent_work_item.parent_link) if relative_position == 'AFTER'
+ end
+
+ override :render_conflict_error?
+ def render_conflict_error?
+ return false if params[:adjacent_work_item] && params[:relative_position]
+
+ super
+ end
+
+ override :linkable?
+ def linkable?(work_item)
+ can_admin_link?(work_item)
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/prepare_import_csv_service.rb b/app/services/work_items/prepare_import_csv_service.rb
new file mode 100644
index 00000000000..a331b2870f4
--- /dev/null
+++ b/app/services/work_items/prepare_import_csv_service.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module WorkItems
+ class PrepareImportCsvService < Import::PrepareService
+ extend ::Gitlab::Utils::Override
+
+ private
+
+ override :worker
+ def worker
+ ImportWorkItemsCsvWorker
+ end
+
+ override :success_message
+ def success_message
+ _("Your work items are being imported. Once finished, you'll receive a confirmation email.")
+ end
+ end
+end
diff --git a/app/services/work_items/update_service.rb b/app/services/work_items/update_service.rb
index d4acadbc851..defdeebfed8 100644
--- a/app/services/work_items/update_service.rb
+++ b/app/services/work_items/update_service.rb
@@ -2,6 +2,7 @@
module WorkItems
class UpdateService < ::Issues::UpdateService
+ extend Gitlab::Utils::Override
include WidgetableService
def initialize(container:, current_user: nil, params: {}, spam_params: nil, widget_params: {})
@@ -26,6 +27,15 @@ module WorkItems
private
+ override :handle_quick_actions
+ def handle_quick_actions(work_item)
+ # Do not handle quick actions unless the work item is the default Issue.
+ # The available quick actions for a work item depend on its type and widgets.
+ return unless work_item.work_item_type.default_issue?
+
+ super
+ end
+
def prepare_update_params(work_item)
execute_widgets(
work_item: work_item,
diff --git a/app/services/work_items/widgets/assignees_service/update_service.rb b/app/services/work_items/widgets/assignees_service/update_service.rb
index 9176b71c85e..7a084917ea7 100644
--- a/app/services/work_items/widgets/assignees_service/update_service.rb
+++ b/app/services/work_items/widgets/assignees_service/update_service.rb
@@ -5,6 +5,8 @@ module WorkItems
module AssigneesService
class UpdateService < WorkItems::Widgets::BaseService
def before_update_in_transaction(params:)
+ params[:assignee_ids] = [] if new_type_excludes_widget?
+
return unless params.present? && params.has_key?(:assignee_ids)
return unless has_permission?(:set_work_item_metadata)
diff --git a/app/services/work_items/widgets/award_emoji_service/update_service.rb b/app/services/work_items/widgets/award_emoji_service/update_service.rb
new file mode 100644
index 00000000000..7c58c0c9af9
--- /dev/null
+++ b/app/services/work_items/widgets/award_emoji_service/update_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module AwardEmojiService
+ class UpdateService < WorkItems::Widgets::BaseService
+ def before_update_in_transaction(params:)
+ return unless params.present? && params.key?(:name) && params.key?(:action)
+ return unless has_permission?(:award_emoji)
+
+ service_response!(service_result(params[:action], params[:name]))
+ end
+
+ private
+
+ def service_result(action, name)
+ class_name = {
+ add: ::AwardEmojis::AddService,
+ remove: ::AwardEmojis::DestroyService
+ }
+
+ return invalid_action_error(action) unless class_name.key?(action)
+
+ class_name[action].new(work_item, name, current_user).execute
+ end
+
+ def invalid_action_error(key)
+ error(format(_("%{key} is not a valid action."), key: key))
+ end
+ end
+ end
+ end
+end
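The award emoji widget dispatches on the :action param to either the add or the destroy service. The underlying pattern is a hash from symbols to service classes, roughly as below; the two service classes here are placeholders, not the real AwardEmojis services:

    # Placeholder services to illustrate the hash-based dispatch.
    class AddService
      def initialize(name)
        @name = name
      end

      def execute
        "added #{@name}"
      end
    end

    class RemoveService
      def initialize(name)
        @name = name
      end

      def execute
        "removed #{@name}"
      end
    end

    HANDLERS = { add: AddService, remove: RemoveService }.freeze

    def dispatch(action, name)
      klass = HANDLERS[action]
      return "#{action} is not a valid action." unless klass

      klass.new(name).execute
    end

    puts dispatch(:add, 'thumbsup')  # => added thumbsup
    puts dispatch(:star, 'thumbsup') # => star is not a valid action.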
diff --git a/app/services/work_items/widgets/base_service.rb b/app/services/work_items/widgets/base_service.rb
index 1ff03a09f9f..cae6ed7646f 100644
--- a/app/services/work_items/widgets/base_service.rb
+++ b/app/services/work_items/widgets/base_service.rb
@@ -16,9 +16,21 @@ module WorkItems
private
+ def new_type_excludes_widget?
+ return false unless service_params[:work_item_type]
+
+ service_params[:work_item_type].widgets.exclude?(@widget.class)
+ end
+
def has_permission?(permission)
can?(current_user, permission, widget.work_item)
end
+
+ def service_response!(result)
+ return result unless result[:status] == :error
+
+ raise WidgetError, result[:message]
+ end
end
end
end
diff --git a/app/services/work_items/widgets/current_user_todos_service/update_service.rb b/app/services/work_items/widgets/current_user_todos_service/update_service.rb
new file mode 100644
index 00000000000..38e2ae4de32
--- /dev/null
+++ b/app/services/work_items/widgets/current_user_todos_service/update_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module CurrentUserTodosService
+ class UpdateService < WorkItems::Widgets::BaseService
+ def before_update_in_transaction(params:)
+ return unless params.present? && params.key?(:action)
+
+ case params[:action]
+ when "add"
+ add_todo
+ when "mark_as_done"
+ mark_as_done(params[:todo_id])
+ end
+ end
+
+ private
+
+ def add_todo
+ return unless has_permission?(:create_todo)
+
+ TodoService.new.mark_todo(work_item, current_user)&.first
+ end
+
+ def mark_as_done(todo_id)
+ todos = TodosFinder.new(current_user, state: :pending, target_id: work_item.id).execute
+ todos = todo_id ? todos.id_in(todo_id) : todos
+
+ return if todos.empty?
+
+ TodoService.new.resolve_todos(todos, current_user, resolved_by_action: :api_done)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/description_service/update_service.rb b/app/services/work_items/widgets/description_service/update_service.rb
index fe591ba605e..2640c6132cd 100644
--- a/app/services/work_items/widgets/description_service/update_service.rb
+++ b/app/services/work_items/widgets/description_service/update_service.rb
@@ -5,6 +5,8 @@ module WorkItems
module DescriptionService
class UpdateService < WorkItems::Widgets::BaseService
def before_update_callback(params: {})
+ params[:description] = nil if new_type_excludes_widget?
+
return unless params.present? && params.key?(:description)
return unless has_permission?(:update_work_item)
diff --git a/app/services/work_items/widgets/hierarchy_service/base_service.rb b/app/services/work_items/widgets/hierarchy_service/base_service.rb
index 236762d6937..45393eab58c 100644
--- a/app/services/work_items/widgets/hierarchy_service/base_service.rb
+++ b/app/services/work_items/widgets/hierarchy_service/base_service.rb
@@ -63,9 +63,7 @@ module WorkItems
work_item.reload_work_item_parent
work_item.work_item_children.reset
- return result unless result[:status] == :error
-
- raise WidgetError, result[:message]
+ super
end
end
end
diff --git a/app/services/work_items/widgets/hierarchy_service/update_service.rb b/app/services/work_items/widgets/hierarchy_service/update_service.rb
index 48b540f919e..00b45c04ffa 100644
--- a/app/services/work_items/widgets/hierarchy_service/update_service.rb
+++ b/app/services/work_items/widgets/hierarchy_service/update_service.rb
@@ -4,10 +4,68 @@ module WorkItems
module Widgets
module HierarchyService
class UpdateService < WorkItems::Widgets::HierarchyService::BaseService
+ INVALID_RELATIVE_POSITION_ERROR = 'Relative position is not valid.'
+ CHILDREN_REORDERING_ERROR = 'Relative position cannot be combined with childrenIds.'
+ UNRELATED_ADJACENT_HIERARCHY_ERROR = 'The adjacent work item\'s parent must match the new parent work item.'
+ INVALID_ADJACENT_PARENT_ERROR = 'The adjacent work item\'s parent must match the current parent work item.'
+
def before_update_in_transaction(params:)
return unless params.present?
- service_response!(handle_hierarchy_changes(params))
+ if positioning?(params)
+ service_response!(handle_positioning(params))
+ else
+ service_response!(handle_hierarchy_changes(params))
+ end
+ end
+
+ private
+
+ def handle_positioning(params)
+ validate_positioning!(params)
+
+ arguments = {
+ target_issuable: work_item,
+ adjacent_work_item: params.delete(:adjacent_work_item),
+ relative_position: params.delete(:relative_position)
+ }
+ work_item_parent = params.delete(:parent) || work_item.work_item_parent
+ ::WorkItems::ParentLinks::ReorderService.new(work_item_parent, current_user, arguments).execute
+ end
+
+ def positioning?(params)
+ params[:relative_position].present? || params[:adjacent_work_item].present?
+ end
+
+ def error!(message)
+ service_response!(error(_(message)))
+ end
+
+ def validate_positioning!(params)
+ error!(INVALID_RELATIVE_POSITION_ERROR) if incomplete_relative_position?(params)
+ error!(CHILDREN_REORDERING_ERROR) if positioning_children?(params)
+ error!(UNRELATED_ADJACENT_HIERARCHY_ERROR) if unrelated_adjacent_hierarchy?(params)
+ error!(INVALID_ADJACENT_PARENT_ERROR) if invalid_adjacent_parent?(params)
+ end
+
+ def positioning_children?(params)
+ params.key?(:children)
+ end
+
+ def incomplete_relative_position?(params)
+ params[:adjacent_work_item].blank? || params[:relative_position].blank?
+ end
+
+ def unrelated_adjacent_hierarchy?(params)
+ return false if params[:parent].blank?
+
+ params[:parent] != params[:adjacent_work_item].work_item_parent
+ end
+
+ def invalid_adjacent_parent?(params)
+ return false if params[:parent].present?
+
+ work_item.work_item_parent != params[:adjacent_work_item].work_item_parent
end
end
end
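validate_positioning! rejects incomplete or inconsistent positioning params before any reorder runs. The guards reduce to a few predicate checks over the params hash, sketched here without the surrounding service plumbing; error handling is simplified to raising, and parent values are plain symbols:

    # Simplified guard checks mirroring the validations above.
    def validate_positioning!(params, current_parent:)
      if params[:adjacent_work_item].nil? || params[:relative_position].nil?
        raise ArgumentError, 'Relative position is not valid.'
      end

      raise ArgumentError, 'Relative position cannot be combined with childrenIds.' if params.key?(:children)

      adjacent_parent = params.dig(:adjacent_work_item, :parent)

      if params[:parent] && params[:parent] != adjacent_parent
        raise ArgumentError, "The adjacent work item's parent must match the new parent work item."
      end

      if params[:parent].nil? && current_parent != adjacent_parent
        raise ArgumentError, "The adjacent work item's parent must match the current parent work item."
      end
    end

    validate_positioning!(
      { adjacent_work_item: { parent: :epic_a }, relative_position: 'AFTER' },
      current_parent: :epic_a
    ) # passes silently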
diff --git a/app/services/work_items/widgets/labels_service/update_service.rb b/app/services/work_items/widgets/labels_service/update_service.rb
index f00ea5c95ca..b880398677d 100644
--- a/app/services/work_items/widgets/labels_service/update_service.rb
+++ b/app/services/work_items/widgets/labels_service/update_service.rb
@@ -5,6 +5,11 @@ module WorkItems
module LabelsService
class UpdateService < WorkItems::Widgets::BaseService
def prepare_update_params(params: {})
+ if new_type_excludes_widget?
+ params[:remove_label_ids] = @work_item.labels.map(&:id)
+ params[:add_label_ids] = []
+ end
+
return if params.blank?
service_params.merge!(params.slice(:add_label_ids, :remove_label_ids))
diff --git a/app/services/work_items/widgets/milestone_service/base_service.rb b/app/services/work_items/widgets/milestone_service/base_service.rb
deleted file mode 100644
index f373e6daea3..00000000000
--- a/app/services/work_items/widgets/milestone_service/base_service.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-module WorkItems
- module Widgets
- module MilestoneService
- class BaseService < WorkItems::Widgets::BaseService
- private
-
- def handle_milestone_change(params:)
- return unless params.present? && params.key?(:milestone_id)
-
- unless has_permission?(:set_work_item_metadata)
- params.delete(:milestone_id)
- return
- end
-
- if params[:milestone_id].nil?
- work_item.milestone = nil
-
- return
- end
-
- project = work_item.project
- milestone = MilestonesFinder.new({
- project_ids: [project.id],
- group_ids: project.group&.self_and_ancestors&.select(:id),
- ids: [params[:milestone_id]]
- }).execute.first
-
- if milestone
- work_item.milestone = milestone
- else
- params.delete(:milestone_id)
- end
- end
- end
- end
- end
-end
diff --git a/app/services/work_items/widgets/milestone_service/create_service.rb b/app/services/work_items/widgets/milestone_service/create_service.rb
deleted file mode 100644
index e8d6bfe503c..00000000000
--- a/app/services/work_items/widgets/milestone_service/create_service.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module WorkItems
- module Widgets
- module MilestoneService
- class CreateService < WorkItems::Widgets::MilestoneService::BaseService
- def before_create_callback(params:)
- handle_milestone_change(params: params)
- end
- end
- end
- end
-end
diff --git a/app/services/work_items/widgets/milestone_service/update_service.rb b/app/services/work_items/widgets/milestone_service/update_service.rb
deleted file mode 100644
index 7ff0c2a5367..00000000000
--- a/app/services/work_items/widgets/milestone_service/update_service.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-module WorkItems
- module Widgets
- module MilestoneService
- class UpdateService < WorkItems::Widgets::MilestoneService::BaseService
- def before_update_callback(params:)
- handle_milestone_change(params: params)
- end
- end
- end
- end
-end
diff --git a/app/services/work_items/widgets/notifications_service/update_service.rb b/app/services/work_items/widgets/notifications_service/update_service.rb
new file mode 100644
index 00000000000..b301e2ca7db
--- /dev/null
+++ b/app/services/work_items/widgets/notifications_service/update_service.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ module NotificationsService
+ class UpdateService < WorkItems::Widgets::BaseService
+ def before_update_in_transaction(params:)
+ return unless params.present? && params.key?(:subscribed)
+ return unless has_permission?(:update_subscription)
+
+ update_subscription(work_item, params)
+ end
+
+ private
+
+ def update_subscription(work_item, subscription_params)
+ work_item.set_subscription(
+ current_user,
+ subscription_params[:subscribed],
+ work_item.project
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/work_items/widgets/start_and_due_date_service/update_service.rb b/app/services/work_items/widgets/start_and_due_date_service/update_service.rb
index 6a5dc0d5ef3..0dbf3aa31d9 100644
--- a/app/services/work_items/widgets/start_and_due_date_service/update_service.rb
+++ b/app/services/work_items/widgets/start_and_due_date_service/update_service.rb
@@ -5,6 +5,8 @@ module WorkItems
module StartAndDueDateService
class UpdateService < WorkItems::Widgets::BaseService
def before_update_callback(params: {})
+ return widget.work_item.assign_attributes({ start_date: nil, due_date: nil }) if new_type_excludes_widget?
+
return if params.blank?
widget.work_item.assign_attributes(params.slice(:start_date, :due_date))